Oct 01 05:29:09 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 01 05:29:09 crc restorecon[4656]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:09 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 05:29:10 crc restorecon[4656]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc 
restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 05:29:10 crc 
restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc 
restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc 
restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 
crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 
05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc 
restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 05:29:10 crc restorecon[4656]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 01 05:29:11 crc kubenswrapper[4661]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.485513 4661 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492130 4661 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492163 4661 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492173 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492182 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492191 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492201 4661 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492210 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492222 4661 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492233 4661 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492242 4661 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492251 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492260 4661 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492268 4661 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492276 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492286 4661 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492310 4661 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492320 4661 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492329 4661 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492337 4661 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492346 4661 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492381 4661 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492393 4661 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492401 4661 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492410 4661 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492418 4661 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492426 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492434 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492443 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492451 4661 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492459 4661 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492466 4661 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492473 4661 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492482 4661 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492491 4661 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492500 4661 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492508 4661 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492516 4661 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492526 4661 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492535 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492544 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492552 4661 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492561 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492569 4661 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492578 4661 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492585 4661 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492593 4661 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492601 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492609 4661 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492617 4661 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492625 4661 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492658 4661 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492668 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492676 4661 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492684 4661 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492696 4661 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492706 4661 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492714 4661 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492724 4661 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492732 4661 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492740 4661 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492748 4661 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492756 4661 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492763 4661 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492771 4661 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492779 4661 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492787 4661 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492795 4661 feature_gate.go:330] unrecognized feature gate: Example
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492804 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492812 4661 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492819 4661 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.492827 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493705 4661 flags.go:64] FLAG: --address="0.0.0.0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493730 4661 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493747 4661 flags.go:64] FLAG: --anonymous-auth="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493759 4661 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493771 4661 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493780 4661 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493792 4661 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493803 4661 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493812 4661 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493822 4661 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493832 4661 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493841 4661 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493850 4661 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493859 4661 flags.go:64] FLAG: --cgroup-root=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493868 4661 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493877 4661 flags.go:64] FLAG: --client-ca-file=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493886 4661 flags.go:64] FLAG: --cloud-config=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493894 4661 flags.go:64] FLAG: --cloud-provider=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493903 4661 flags.go:64] FLAG: --cluster-dns="[]"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493914 4661 flags.go:64] FLAG: --cluster-domain=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493922 4661 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493932 4661 flags.go:64] FLAG: --config-dir=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493941 4661 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493950 4661 flags.go:64] FLAG: --container-log-max-files="5"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493962 4661 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493972 4661 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493981 4661 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493990 4661 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.493999 4661 flags.go:64] FLAG: --contention-profiling="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494009 4661 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494018 4661 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494027 4661 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494040 4661 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494051 4661 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494060 4661 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494069 4661 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494078 4661 flags.go:64] FLAG: --enable-load-reader="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494086 4661 flags.go:64] FLAG: --enable-server="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494095 4661 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494106 4661 flags.go:64] FLAG: --event-burst="100"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494116 4661 flags.go:64] FLAG: --event-qps="50"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494125 4661 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494134 4661 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494143 4661 flags.go:64] FLAG: --eviction-hard=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494154 4661 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494163 4661 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494172 4661 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494181 4661 flags.go:64] FLAG: --eviction-soft=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494190 4661 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494198 4661 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494208 4661 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494216 4661 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494225 4661 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494234 4661 flags.go:64] FLAG: --fail-swap-on="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494242 4661 flags.go:64] FLAG: --feature-gates=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494253 4661 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494263 4661 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494272 4661 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494281 4661 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494290 4661 flags.go:64] FLAG: --healthz-port="10248"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494299 4661 flags.go:64] FLAG: --help="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494310 4661 flags.go:64] FLAG: --hostname-override=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494319 4661 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494328 4661 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494337 4661 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494345 4661 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494355 4661 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494363 4661 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494373 4661 flags.go:64] FLAG: --image-service-endpoint=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494382 4661 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494390 4661 flags.go:64] FLAG: --kube-api-burst="100"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494400 4661 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494409 4661 flags.go:64] FLAG: --kube-api-qps="50"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494418 4661 flags.go:64] FLAG: --kube-reserved=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494427 4661 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494435 4661 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494445 4661 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494453 4661 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494462 4661 flags.go:64] FLAG: --lock-file=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494471 4661 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494480 4661 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494489 4661 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494503 4661 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494512 4661 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494521 4661 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494530 4661 flags.go:64] FLAG: --logging-format="text"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494540 4661 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494551 4661 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494560 4661 flags.go:64] FLAG: --manifest-url=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494570 4661 flags.go:64] FLAG: --manifest-url-header=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494582 4661 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494591 4661 flags.go:64] FLAG: --max-open-files="1000000"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494603 4661 flags.go:64] FLAG: --max-pods="110"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494612 4661 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494622 4661 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494656 4661 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494666 4661 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494675 4661 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494684 4661 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494693 4661 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494713 4661 flags.go:64] FLAG: --node-status-max-images="50"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494723 4661 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494732 4661 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494741 4661 flags.go:64] FLAG: --pod-cidr=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494751 4661 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494764 4661 flags.go:64] FLAG: --pod-manifest-path=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494773 4661 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494783 4661 flags.go:64] FLAG: --pods-per-core="0"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494792 4661 flags.go:64] FLAG: --port="10250"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494801 4661 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494810 4661 flags.go:64] FLAG: --provider-id=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494818 4661 flags.go:64] FLAG: --qos-reserved=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494828 4661 flags.go:64] FLAG: --read-only-port="10255"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494836 4661 flags.go:64] FLAG: --register-node="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494845 4661 flags.go:64] FLAG: --register-schedulable="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494854 4661 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494868 4661 flags.go:64] FLAG: --registry-burst="10"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494877 4661 flags.go:64] FLAG: --registry-qps="5"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494886 4661 flags.go:64] FLAG: --reserved-cpus=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494895 4661 flags.go:64] FLAG: --reserved-memory=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494905 4661 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494914 4661 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494923 4661 flags.go:64] FLAG: --rotate-certificates="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494935 4661 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494944 4661 flags.go:64] FLAG: --runonce="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494953 4661 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494963 4661 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494972 4661 flags.go:64] FLAG: --seccomp-default="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494981 4661 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494990 4661 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.494999 4661 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495008 4661 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495017 4661 flags.go:64] FLAG: --storage-driver-password="root"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495027 4661 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495036 4661 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495045 4661 flags.go:64] FLAG: --storage-driver-user="root"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495054 4661 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495064 4661 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495073 4661 flags.go:64] FLAG: --system-cgroups=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495082 4661 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495096 4661 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495105 4661 flags.go:64] FLAG: --tls-cert-file=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495114 4661 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495124 4661 flags.go:64] FLAG: --tls-min-version=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495133 4661 flags.go:64] FLAG: --tls-private-key-file=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495141 4661 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495150 4661 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495158 4661 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495168 4661 flags.go:64] FLAG: --v="2"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495181 4661 flags.go:64] FLAG: --version="false"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495193 4661 flags.go:64] FLAG: --vmodule=""
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495203 4661 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.495212 4661 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495422 4661 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495433 4661 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495442 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495455 4661 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495463 4661 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495472 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495481 4661 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495489 4661 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495498 4661 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495506 4661 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495514 4661 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495522 4661 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495532 4661 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495540 4661 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495549 4661 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495557 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495565 4661 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495574 4661 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495581 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495589 4661 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495597 4661 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495605 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495613 4661 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495622 4661 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495653 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495662 4661 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495670 4661 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495678 4661 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495686 4661 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495694 4661 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495702 4661 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495709 4661 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495717 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495725 4661 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495733 4661 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495744 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495752 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495759 4661 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495767 4661 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495775 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495783 4661 feature_gate.go:330] unrecognized feature gate: Example
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495790 4661 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495799 4661 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495807 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495814 4661 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495822 4661 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495830 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495837 4661 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495845 4661 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495856 4661 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495866 4661 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495874 4661 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495885 4661 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495897 4661 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495905 4661 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495913 4661 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495921 4661 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495928 4661 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495936 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495945 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495952 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495960 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495970 4661 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495980 4661 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495989 4661 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.495999 4661 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.496007 4661 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.496018 4661 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.496027 4661 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.496036 4661 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.496043 4661 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.496067 4661 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.507093 4661 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.507119 4661 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507194 4661 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507202 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507207 4661 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507211 4661 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507215 4661 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507219 4661 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507222 4661 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507225 4661 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507229 4661 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507232 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507236 4661 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507239 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507243 4661 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507246 4661 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507251 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507255 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507259 4661 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507262 4661 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507267 4661 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507273 4661 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507278 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507283 4661 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507288 4661 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507293 4661 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507298 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507302 4661 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507308 4661 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507313 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507318 4661 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507322 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507329 4661 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507336 4661 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507343 4661 feature_gate.go:330] unrecognized feature gate: Example
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507351 4661 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507359 4661 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507365 4661 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507371 4661 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507376 4661 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507381 4661 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507386 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507391 4661 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507397 4661 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507404 4661 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507410 4661 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507415 4661 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507423 4661 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507427 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507430 4661 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507434 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507437 4661 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507441 4661 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507445 4661 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507449 4661 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507453 4661 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507457 4661 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507461 4661 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507465 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507469 4661 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507473 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507476 4661 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507480 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507484 4661 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507488 4661 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507491 4661 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507495 4661 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507498 4661 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507502 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507505 4661 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507509 4661 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507513 4661 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507516 4661 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.507523 4661 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507682 4661 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507691 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507696 4661 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507700 4661 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507704 4661 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507709 4661 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507713 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507716 4661 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507721 4661 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507725 4661 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507729 4661 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507732 4661 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507736 4661 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507740 4661 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507744 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507747 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507751 4661 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507755 4661 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507759 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507762 4661 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507766 4661 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507771 4661 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507776 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507779 4661 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507783 4661 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507787 4661 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507790 4661 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507794 4661 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507797 4661 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507801 4661 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507805 4661 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507808 4661 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507812 4661 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507815 4661 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507819 4661 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507823 4661 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507826 4661 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507831 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507836 4661 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507841 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507846 4661 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507851 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507855 4661 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507859 4661 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507863 4661 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507868 4661 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507872 4661 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507876 4661 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507880 4661 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507884 4661 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507888 4661 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507892 4661 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507897 4661 feature_gate.go:330] unrecognized feature gate: Example
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507901 4661 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507905 4661 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507909 4661 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507913 4661 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507918 4661 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507922 4661 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507925 4661 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507929 4661 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507932 4661 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507936 4661 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507940 4661 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507944 4661 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507947 4661 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507951 4661 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507955 4661 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507958 4661 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507964 4661 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.507968 4661 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.507974 4661 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.508141 4661 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.512701 4661 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.512772 4661 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.515000 4661 server.go:997] "Starting client certificate rotation" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.515021 4661 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.516334 4661 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-19 20:25:32.393794214 +0000 UTC Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.516524 4661 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1910h56m20.877274126s for next certificate rotation Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.540438 4661 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.543434 4661 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.568513 4661 log.go:25] "Validated CRI v1 runtime API" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.607061 4661 log.go:25] "Validated CRI v1 image API" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.609295 4661 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.618347 4661 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-01-05-24-09-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.618399 4661 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.641388 4661 manager.go:217] Machine: {Timestamp:2025-10-01 05:29:11.636791689 +0000 UTC m=+0.574770343 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:5955e218-6cd8-4aae-9fe3-15f4479360e0 BootID:3e4e6ba7-0055-4484-bdb1-7c7b39829e51 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:a2:33:75 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:a2:33:75 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:d8:2d:76 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:5b:be:6a Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:79:a3:e1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b8:31:6e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:5a:47:e3:25:db:7f Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1e:1f:cb:f1:99:c1 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.641742 4661 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.641937 4661 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.645082 4661 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.645320 4661 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.645359 4661 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.645689 4661 topology_manager.go:138] "Creating topology manager with none policy" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.645707 4661 container_manager_linux.go:303] "Creating device plugin manager" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.646142 4661 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.646184 4661 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 
05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.646407 4661 state_mem.go:36] "Initialized new in-memory state store" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.646544 4661 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.650583 4661 kubelet.go:418] "Attempting to sync node with API server" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.650613 4661 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.650758 4661 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.650793 4661 kubelet.go:324] "Adding apiserver pod source" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.650812 4661 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.662410 4661 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.662404 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.662547 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.662576 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.662736 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.663913 4661 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.666588 4661 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668176 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668220 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668236 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668250 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668272 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668286 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668302 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668325 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668368 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668383 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668402 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.668415 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.669153 4661 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.669892 4661 server.go:1280] "Started kubelet" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.671270 4661 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.671277 4661 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.671495 4661 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.672121 4661 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 01 05:29:11 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.673207 4661 server.go:460] "Adding debug handlers to kubelet server" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.673597 4661 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.673709 4661 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.674264 4661 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.674886 4661 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.675113 4661 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.675365 4661 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.675076 4661 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 16:49:22.71197749 +0000 UTC Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.675419 4661 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2147h20m11.036573745s for next certificate rotation Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.675850 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.675969 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.676781 4661 factory.go:55] Registering systemd factory Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.676822 4661 factory.go:221] Registration of the systemd container factory successfully Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.677353 4661 factory.go:153] Registering CRI-O factory Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.677412 4661 factory.go:221] Registration of the crio container factory successfully Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.677338 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="200ms" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.677522 4661 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.677565 4661 factory.go:103] Registering Raw factory Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.677590 4661 manager.go:1196] Started watching for new ooms in manager Oct 01 05:29:11 crc kubenswrapper[4661]: 
I1001 05:29:11.678292 4661 manager.go:319] Starting recovery of all containers Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.677466 4661 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a46e480d9ce3d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 05:29:11.669845565 +0000 UTC m=+0.607824209,LastTimestamp:2025-10-01 05:29:11.669845565 +0000 UTC m=+0.607824209,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699148 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699267 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699291 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699313 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699335 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699354 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699380 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699404 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699428 
4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699450 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699470 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699491 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699510 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699536 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699607 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699627 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699678 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699696 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699715 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699734 4661 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699763 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699782 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699806 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699829 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699848 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699876 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699900 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.699932 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700021 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700044 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700087 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700107 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700128 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700153 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700228 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700255 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700276 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700306 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700369 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700392 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700411 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700434 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.700457 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702233 4661 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702280 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702307 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702329 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702351 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702371 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702394 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702416 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702437 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 
05:29:11.702458 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702508 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702535 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702559 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702583 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702603 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702661 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702683 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702704 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702728 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702780 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702803 4661 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702823 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702843 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702863 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702887 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702910 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702929 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702951 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702971 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.702992 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703030 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703048 4661 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703137 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703160 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703180 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703200 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703218 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703236 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703257 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703281 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703301 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703324 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703345 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703369 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703387 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703407 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703428 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703447 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703465 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703486 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703506 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703525 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703544 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703563 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703583 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703602 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703622 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703670 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703697 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703718 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703739 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703759 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703787 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703809 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703829 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703852 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703883 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703975 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.703998 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704021 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704042 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704062 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704083 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704106 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704128 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704151 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704170 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704193 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704212 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704230 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704255 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704274 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704295 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704316 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704371 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704393 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704413 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704431 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704450 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704470 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704489 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704509 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704529 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704550 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704570 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704589 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704619 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704676 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704698 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704720 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704740 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704760 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704779 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704801 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704823 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704844 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704865 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704892 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704911 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704932 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704955 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704977 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.704997 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705018 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705038 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705063 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705087 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705109 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705128 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705149 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705170 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705190 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705212 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705236 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705255 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705275 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705296 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705317 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705338 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705360 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705419 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705514 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705559 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705579 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705597 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705688 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705705 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705720 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705736 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705774 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705792 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705812 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" 
volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705846 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705861 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705879 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705896 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705932 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705948 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705965 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.705980 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706013 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706032 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706046 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706061 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706103 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706124 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706138 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706177 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706194 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706217 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706259 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706279 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706296 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706313 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706352 4661 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706365 4661 reconstruct.go:97] "Volume reconstruction finished" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.706376 4661 reconciler.go:26] "Reconciler: start to sync state" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.712289 4661 manager.go:324] Recovery completed Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.722996 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.725427 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.725473 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.725486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.727945 4661 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.727998 4661 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.728035 4661 state_mem.go:36] "Initialized new in-memory state store" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.745143 4661 policy_none.go:49] "None policy: Start" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.747960 4661 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.748051 4661 state_mem.go:35] "Initializing new in-memory state store" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.751602 4661 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.755558 4661 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.755608 4661 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.755675 4661 kubelet.go:2335] "Starting kubelet main sync loop" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.755736 4661 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 01 05:29:11 crc kubenswrapper[4661]: W1001 05:29:11.756534 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.756618 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.775189 4661 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.803719 4661 manager.go:334] "Starting Device Plugin manager" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.803788 4661 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.803805 4661 server.go:79] "Starting device plugin registration server" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.804429 4661 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.804461 4661 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.805395 4661 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.805510 4661 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.805520 4661 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.818308 4661 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.856668 4661 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.856792 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.858294 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.858364 4661 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.858387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.858693 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.859005 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.859103 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860255 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860366 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860666 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860779 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860821 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.860819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.861026 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.862995 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.863055 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.863076 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.864485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.864544 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.864567 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.865132 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.866294 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.866987 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.868649 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.868707 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.868729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.868950 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.869176 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.869230 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.869789 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.869871 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.869895 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870235 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870279 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870298 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870299 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870335 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870514 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.870550 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.871286 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.871312 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.871321 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.878644 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="400ms" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908660 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908741 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908789 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908822 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908854 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908888 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908923 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908954 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.908985 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909081 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909139 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909168 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909196 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909242 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909286 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.909396 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:11 crc 
kubenswrapper[4661]: I1001 05:29:11.910896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.910952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.910971 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:11 crc kubenswrapper[4661]: I1001 05:29:11.911069 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 05:29:11 crc kubenswrapper[4661]: E1001 05:29:11.911586 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010688 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010762 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010792 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010818 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010844 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010867 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010888 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010911 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010908 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010958 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010969 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.010968 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011009 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011033 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011051 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011016 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011052 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 
05:29:12.011005 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011071 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011166 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011201 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011205 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011276 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011410 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011472 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011500 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011536 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011621 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.011668 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.112112 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.114324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.114383 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.114396 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.114442 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.115054 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.206386 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.235696 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.245817 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.256171 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-ded28de644bf7d44657320d6031164584310c435e9466195d90dd60474ae7d66 WatchSource:0}: Error finding container ded28de644bf7d44657320d6031164584310c435e9466195d90dd60474ae7d66: Status 404 returned error can't find the container with id ded28de644bf7d44657320d6031164584310c435e9466195d90dd60474ae7d66 Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.263422 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.268716 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.274846 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-dc0f0223902f7f2a8d74a069add834045f264009a6728b3d179f8a028f4889c7 WatchSource:0}: Error finding container dc0f0223902f7f2a8d74a069add834045f264009a6728b3d179f8a028f4889c7: Status 404 returned error can't find the container with id dc0f0223902f7f2a8d74a069add834045f264009a6728b3d179f8a028f4889c7
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.277790 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-8bd16b35fb527451188c483112f2be61ed825451618bfe477d6f243c54f7487c WatchSource:0}: Error finding container 8bd16b35fb527451188c483112f2be61ed825451618bfe477d6f243c54f7487c: Status 404 returned error can't find the container with id 8bd16b35fb527451188c483112f2be61ed825451618bfe477d6f243c54f7487c
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.281046 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="800ms"
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.285710 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-99b41d3f058e075280d9ce2c49ae3f5f235b91fc8e143361accbb699d1c0d5b6 WatchSource:0}: Error finding container 99b41d3f058e075280d9ce2c49ae3f5f235b91fc8e143361accbb699d1c0d5b6: Status 404 returned error can't find the container with id 99b41d3f058e075280d9ce2c49ae3f5f235b91fc8e143361accbb699d1c0d5b6
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.295243 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-749896c8639fe7c1708f2196d6fe3054deed88246951057154b883e551ac1515 WatchSource:0}: Error finding container 749896c8639fe7c1708f2196d6fe3054deed88246951057154b883e551ac1515: Status 404 returned error can't find the container with id 749896c8639fe7c1708f2196d6fe3054deed88246951057154b883e551ac1515
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.516118 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.517671 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.517719 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.517732 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.517762 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.518432 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc"
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.621597 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.621751 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError"
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.673147 4661 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.762431 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"99b41d3f058e075280d9ce2c49ae3f5f235b91fc8e143361accbb699d1c0d5b6"}
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.764106 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8bd16b35fb527451188c483112f2be61ed825451618bfe477d6f243c54f7487c"}
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.765570 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"dc0f0223902f7f2a8d74a069add834045f264009a6728b3d179f8a028f4889c7"}
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.767343 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ded28de644bf7d44657320d6031164584310c435e9466195d90dd60474ae7d66"}
Oct 01 05:29:12 crc kubenswrapper[4661]: I1001 05:29:12.773540 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"749896c8639fe7c1708f2196d6fe3054deed88246951057154b883e551ac1515"}
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.814197 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.814293 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError"
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.905607 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.905804 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError"
Oct 01 05:29:12 crc kubenswrapper[4661]: W1001 05:29:12.958874 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:12 crc kubenswrapper[4661]: E1001 05:29:12.958956 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError"
Oct 01 05:29:13 crc kubenswrapper[4661]: E1001 05:29:13.082844 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="1.6s"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.319445 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.321869 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.321923 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.321943 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.321981 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 01 05:29:13 crc kubenswrapper[4661]: E1001 05:29:13.322583 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc"
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.672714 4661 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused
Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.778209 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6"}
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.778279 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.778297 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.778314 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.779297 4661 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8" exitCode=0 Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.779366 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.779533 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.781124 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.781152 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.781162 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.782370 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.782514 4661 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4" exitCode=0 Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.782606 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.782666 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.783446 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.783472 4661 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.783486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.784093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.784192 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.784212 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.785939 4661 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361" exitCode=0 Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.786013 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.786128 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.787469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.787502 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.787517 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.789226 4661 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41" exitCode=0 Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.789258 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41"} Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.789388 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.792267 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.792339 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:13 crc kubenswrapper[4661]: I1001 05:29:13.792358 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: W1001 05:29:14.535791 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial 
tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:14 crc kubenswrapper[4661]: E1001 05:29:14.535908 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.672823 4661 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:14 crc kubenswrapper[4661]: E1001 05:29:14.684002 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="3.2s" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.798693 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.798731 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.799730 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.799770 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.799782 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.801442 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.801536 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.801558 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.801488 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.802443 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.802476 4661 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.802486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.805683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.805733 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.805745 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.805758 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.807369 4661 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131" exitCode=0 Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.807431 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131"} Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.807447 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.807507 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808546 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808569 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808588 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808619 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.808682 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.922879 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.924462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.924507 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.924560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:14 crc kubenswrapper[4661]: I1001 05:29:14.924591 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 05:29:14 crc kubenswrapper[4661]: E1001 05:29:14.925239 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 01 05:29:15 crc kubenswrapper[4661]: W1001 05:29:15.336717 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 01 05:29:15 crc kubenswrapper[4661]: E1001 05:29:15.336822 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 01 05:29:15 crc kubenswrapper[4661]: E1001 05:29:15.354385 4661 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a46e480d9ce3d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 05:29:11.669845565 +0000 UTC m=+0.607824209,LastTimestamp:2025-10-01 05:29:11.669845565 +0000 UTC m=+0.607824209,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.813038 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc"} Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.813237 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.814312 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.814395 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.814413 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 
05:29:15.816134 4661 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6" exitCode=0 Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.816299 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.816376 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6"} Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.816496 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.816314 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.816326 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.817580 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.817671 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.817699 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.817994 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.818026 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.818103 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.818171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.818054 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:15 crc kubenswrapper[4661]: I1001 05:29:15.818245 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.141800 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824529 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139"} Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824599 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860"} Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824708 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e"}
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824730 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824778 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.824796 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826267 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826335 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826359 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826502 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826532 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:16 crc kubenswrapper[4661]: I1001 05:29:16.826588 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.834301 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.834499 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.835805 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb"}
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.835920 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836009 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836096 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836134 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836192 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836215 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.836998 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.837027 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.837038 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.841563 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.841690 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:17 crc kubenswrapper[4661]: I1001 05:29:17.841716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.126237 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.128082 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.128335 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.128500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.128710 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.356023 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.408832 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.691211 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.839384 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.839457 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.839474 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841370 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841435 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841508 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841434 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841573 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841678 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841699 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:18 crc kubenswrapper[4661]: I1001 05:29:18.841714 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.842958 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.844390 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.844495 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.844530 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.956212 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.956509 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.958696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.958745 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:19 crc kubenswrapper[4661]: I1001 05:29:19.958769 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.021883 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.022183 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.024559 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.024687 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.024719 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.695228 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.834503 4661 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.834604 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.845851 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.847257 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.847485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:20 crc kubenswrapper[4661]: I1001 05:29:20.847497 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.217182 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.217394 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.219354 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.219408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.219419 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.225932 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:21 crc kubenswrapper[4661]: E1001 05:29:21.818449 4661 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.848313 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.849817 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.849892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:21 crc kubenswrapper[4661]: I1001 05:29:21.849918 4661 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:25 crc kubenswrapper[4661]: W1001 05:29:25.579314 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 05:29:25 crc kubenswrapper[4661]: I1001 05:29:25.579417 4661 trace.go:236] Trace[1750869015]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 05:29:15.577) (total time: 10001ms): Oct 01 05:29:25 crc kubenswrapper[4661]: Trace[1750869015]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (05:29:25.579) Oct 01 05:29:25 crc kubenswrapper[4661]: Trace[1750869015]: [10.00157624s] [10.00157624s] END Oct 01 05:29:25 crc kubenswrapper[4661]: E1001 05:29:25.579445 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 05:29:25 crc kubenswrapper[4661]: W1001 05:29:25.625175 4661 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 05:29:25 crc kubenswrapper[4661]: I1001 05:29:25.625303 4661 trace.go:236] Trace[1142989790]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 05:29:15.623) (total time: 10001ms): Oct 01 05:29:25 crc kubenswrapper[4661]: Trace[1142989790]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (05:29:25.625) Oct 01 05:29:25 crc kubenswrapper[4661]: Trace[1142989790]: [10.001712104s] [10.001712104s] END Oct 01 05:29:25 crc kubenswrapper[4661]: E1001 05:29:25.625333 4661 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 05:29:25 crc kubenswrapper[4661]: I1001 05:29:25.674045 4661 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 01 05:29:26 crc kubenswrapper[4661]: I1001 05:29:26.202676 4661 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 01 05:29:26 crc kubenswrapper[4661]: I1001 05:29:26.202777 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP 
probe failed with statuscode: 403" Oct 01 05:29:26 crc kubenswrapper[4661]: I1001 05:29:26.212085 4661 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 01 05:29:26 crc kubenswrapper[4661]: I1001 05:29:26.212175 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.188367 4661 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.424695 4661 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.662210 4661 apiserver.go:52] "Watching apiserver" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.669530 4661 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.669941 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.670566 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.670619 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:29 crc kubenswrapper[4661]: E1001 05:29:29.670781 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.670822 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.670822 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.671380 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.671070 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:29:29 crc kubenswrapper[4661]: E1001 05:29:29.671912 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:29:29 crc kubenswrapper[4661]: E1001 05:29:29.672052 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.673504 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.673739 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.673963 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.674018 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.674244 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.675803 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.675870 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.676014 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.676306 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.677819 4661 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.720003 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.738697 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.755065 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.771587 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.787131 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.804367 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.819904 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.838299 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.964743 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.972276 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.981033 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.983136 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 01 05:29:29 crc kubenswrapper[4661]: I1001 05:29:29.998605 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.015434 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.029272 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.033322 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.050111 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.064863 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.079691 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.084027 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.101533 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.117542 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.135114 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.149949 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.163629 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.173953 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.732112 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.748236 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.751197 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.754202 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.756901 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:30 crc kubenswrapper[4661]: E1001 05:29:30.757065 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.766522 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.782779 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\
\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.800856 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.821165 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.835438 4661 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.835532 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.836525 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.852559 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.869783 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.874562 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.889208 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.924099 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.943231 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.962145 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.978977 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:30 crc kubenswrapper[4661]: I1001 05:29:30.995350 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.011310 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.023879 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.042129 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.208483 4661 trace.go:236] Trace[146267063]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 05:29:20.009) (total time: 11198ms): Oct 01 05:29:31 crc kubenswrapper[4661]: Trace[146267063]: ---"Objects listed" error: 11198ms (05:29:31.208) Oct 01 05:29:31 crc kubenswrapper[4661]: Trace[146267063]: [11.198582315s] [11.198582315s] END Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.208527 4661 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.211516 4661 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.212166 4661 trace.go:236] Trace[1863548479]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 05:29:19.142) (total time: 12069ms): Oct 01 05:29:31 crc kubenswrapper[4661]: Trace[1863548479]: ---"Objects listed" error: 12069ms (05:29:31.211) Oct 01 05:29:31 crc kubenswrapper[4661]: Trace[1863548479]: [12.069812308s] [12.069812308s] END Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.212210 4661 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.220557 4661 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.225327 4661 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.264737 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.284682 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.303048 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326052 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326107 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326134 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326162 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326182 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326204 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326226 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326246 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326272 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326293 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326314 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326365 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326387 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326408 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326435 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326466 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326492 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326513 4661 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326533 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326579 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326599 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326619 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326658 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326681 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326703 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326746 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326767 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc 
kubenswrapper[4661]: I1001 05:29:31.326788 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326829 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326853 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326872 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326893 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326914 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326956 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326921 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.326995 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327016 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327037 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327074 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327093 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327112 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327155 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327175 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327194 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327215 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327233 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327252 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327273 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327291 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327311 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327335 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327370 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327421 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327441 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327462 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327481 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327500 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327556 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327579 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327598 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327618 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327658 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327679 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327698 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327718 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: 
\"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327703 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327723 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327776 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327800 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327820 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327840 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327862 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327890 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327910 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327970 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.327991 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328003 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328011 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328070 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328108 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328146 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328185 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328225 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328237 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" 
(OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328244 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328285 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328302 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328332 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328355 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328380 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328403 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328427 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328529 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328567 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328575 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328592 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328583 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328689 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328703 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328741 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328772 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328800 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328821 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328844 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328866 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328888 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328907 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328906 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: 
"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328931 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328955 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328966 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.328979 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329065 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329109 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329145 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329181 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329185 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329230 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329253 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329278 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329299 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329322 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329347 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329371 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329392 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329413 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329410 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329438 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329464 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329488 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329514 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329511 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329523 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329567 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329593 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.329624 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330490 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330703 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330743 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330763 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330791 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330828 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330859 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330885 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330916 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.330947 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.331069 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.331256 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.331520 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.331838 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332001 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332138 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332189 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332224 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332233 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332260 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332293 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332330 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332363 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332389 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332421 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332451 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332477 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332508 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332539 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332547 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332575 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332601 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332651 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332684 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332713 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332746 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332773 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332784 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332805 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332874 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332867 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332890 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.332963 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333005 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333049 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333082 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333116 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333149 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333177 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333188 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333203 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333233 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333263 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333288 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333318 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333317 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333370 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333402 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333433 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333460 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333486 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333517 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333535 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333549 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333557 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333581 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333564 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333618 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333671 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333698 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333727 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333768 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333799 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333799 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333827 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333859 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333888 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333915 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333944 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.333973 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334005 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334033 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334026 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334065 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334103 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334130 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334140 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334163 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334197 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334225 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334243 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334255 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334287 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334319 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334350 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334428 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334444 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334461 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334562 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334593 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334609 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334624 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334837 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334845 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334865 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334916 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334955 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.334992 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335025 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335048 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335058 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335120 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335183 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335219 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335325 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335344 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335345 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335361 4661 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335378 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335398 4661 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335412 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335427 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335447 4661 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335461 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335476 4661 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335489 4661 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335511 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335528 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335542 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335556 4661 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335573 4661 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335586 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335602 4661 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335623 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336394 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336412 4661 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336426 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336448 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336463 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336476 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336491 4661 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336509 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336563 4661 reconciler_common.go:293] "Volume detached for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336579 4661 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336593 4661 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336610 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336624 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336655 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336673 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336688 4661 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336702 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336716 4661 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336733 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336748 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335599 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.335749 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336386 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.336670 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.337051 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.337294 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.337715 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.338747 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.338784 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.339726 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.340261 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.340517 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.340870 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.340946 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.340989 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:29:31.840958866 +0000 UTC m=+20.778937490 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.341066 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.341162 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:31.84111234 +0000 UTC m=+20.779090964 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342074 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342293 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342523 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342667 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.343777 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.343958 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.344005 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.344207 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.344650 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.344987 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.345050 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342995 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.343218 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.345257 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.345810 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.345909 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346000 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346398 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346441 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346581 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346711 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.346826 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347013 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347088 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347578 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347648 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347806 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347908 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348024 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348034 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348101 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348142 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.347747 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348170 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348306 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348330 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348408 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348441 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348552 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348453 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348260 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348673 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348678 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348822 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.348985 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.349036 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.349243 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.349378 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.349405 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350143 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350502 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350079 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350943 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350977 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.350991 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351197 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351210 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351361 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351439 4661 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351477 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351498 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351517 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351719 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351780 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351932 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.351996 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.342964 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.352003 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.354337 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.355256 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.361853 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.362174 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.362930 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.363480 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.363728 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.364126 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.364844 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.365842 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.366177 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.366174 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.366234 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.366418 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.366750 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.368003 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.368318 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.368592 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.368892 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.368987 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369025 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369144 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369153 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369328 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369605 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369745 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369755 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.369881 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.370177 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.370293 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.370317 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.370758 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.370995 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.371094 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.371229 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.371537 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.371588 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.372377 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.372459 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.373467 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.373526 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.373684 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.373943 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374177 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374411 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374541 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374536 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374606 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.374971 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.375294 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-
01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.375856 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.375902 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376155 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376261 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376294 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376599 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376831 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.376841 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377166 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377268 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377329 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.377478 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377497 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377522 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.377559 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:31.877536308 +0000 UTC m=+20.815514932 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.377700 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.378481 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.379080 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.379579 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.386134 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.386612 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.389852 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.390244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.391526 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.391888 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.391996 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.392141 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.392181 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.392123 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/c
rcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 
05:29:31.392274 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.392407 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:31.892360297 +0000 UTC m=+20.830339131 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.393829 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.393918 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.393937 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.393999 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:31.89398111 +0000 UTC m=+20.831959734 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.394386 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.397124 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.400388 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.407112 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.408223 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.416565 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.420136 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.426982 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.429443 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437505 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437553 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437593 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437604 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437613 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437621 4661 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437648 4661 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437656 4661 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437666 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 01 
05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437674 4661 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437682 4661 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437690 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437697 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437705 4661 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437713 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437722 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437729 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437739 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437747 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437757 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437766 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437774 4661 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437782 4661 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437790 4661 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437798 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437807 4661 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437818 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437829 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437841 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437852 4661 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437864 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437875 4661 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437885 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437898 4661 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437912 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437923 4661 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437934 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437945 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437957 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437969 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437983 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437994 4661 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.437992 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438005 4661 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438055 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438070 4661 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438082 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438095 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438108 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438132 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438146 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438159 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438159 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438172 4661 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438198 4661 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438215 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438229 4661 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438242 4661 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438255 4661 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438266 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001
05:29:31.438275 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438284 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438294 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438306 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438316 4661 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438328 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438340 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438353 4661 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438364 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438375 4661 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438386 4661 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438397 4661 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438408 4661 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 
05:29:31.438418 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438429 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438442 4661 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438455 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438468 4661 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438479 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438491 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438503 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438514 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438526 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438537 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438547 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438558 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438568 4661 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438579 4661 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438592 4661 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438604 4661 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438616 4661 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438651 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438664 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438676 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438687 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438699 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438710 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438722 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438735 4661 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438745 4661 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" 
(UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438756 4661 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438767 4661 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438780 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438790 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438801 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438813 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438830 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438841 4661 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438852 4661 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438865 4661 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438875 4661 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438888 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438899 4661 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438910 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438923 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438934 4661 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438950 4661 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438962 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438973 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438986 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.438998 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439009 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439021 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439040 4661 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439051 4661 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439061 4661 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439073 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439084 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439094 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439105 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439115 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439125 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439170 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439182 4661 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439191 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439201 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439212 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439223 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439233 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439243 4661 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439254 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439265 4661 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439275 4661 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439286 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439297 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439308 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439318 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439330 4661 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439343 4661 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439354 4661 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439364 4661 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439375 4661 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439386 4661 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439399 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439411 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439423 4661 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439435 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439448 4661 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439460 4661 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.439915 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.449274 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.493719 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.510937 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 05:29:31 crc kubenswrapper[4661]: W1001 05:29:31.518667 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-2863831e56d1498fd2e265d09660b0fe4044551b084e0e3679334124edd8e7b4 WatchSource:0}: Error finding container 2863831e56d1498fd2e265d09660b0fe4044551b084e0e3679334124edd8e7b4: Status 404 returned error can't find the container with id 2863831e56d1498fd2e265d09660b0fe4044551b084e0e3679334124edd8e7b4 Oct 01 05:29:31 crc kubenswrapper[4661]: W1001 05:29:31.524330 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-90fee97f243095421571e96115f38891e8839d960a27daf6575bef405fc8b232 WatchSource:0}: Error finding container 90fee97f243095421571e96115f38891e8839d960a27daf6575bef405fc8b232: Status 404 returned error can't find the container with id 90fee97f243095421571e96115f38891e8839d960a27daf6575bef405fc8b232 Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.524585 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 05:29:31 crc kubenswrapper[4661]: W1001 05:29:31.541584 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-dc81aa1b6cd8b46fdbd9e11973bb7638eaaef74f5ea784b00d2f95f5f69cdf1c WatchSource:0}: Error finding container dc81aa1b6cd8b46fdbd9e11973bb7638eaaef74f5ea784b00d2f95f5f69cdf1c: Status 404 returned error can't find the container with id dc81aa1b6cd8b46fdbd9e11973bb7638eaaef74f5ea784b00d2f95f5f69cdf1c Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.756072 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.756241 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.756601 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.756713 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.760547 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.761324 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.762417 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.763032 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.763981 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.764484 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.765097 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.765984 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.766823 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.767335 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.767532 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers 
with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.768225 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.769029 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.769889 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.770437 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.770983 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.772264 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.773227 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.774237 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.775112 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" 
path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.775893 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.776433 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.776906 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.777037 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.777487 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.778160 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.778672 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.779380 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.780151 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.781314 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.781993 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.782492 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.783699 4661 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.783812 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.785444 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.786620 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.787717 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.788729 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.790750 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.791477 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.792542 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.793260 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.794428 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.794976 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.795803 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.796466 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.797817 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.798335 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.799434 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.800043 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.800144 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.801321 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.801868 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.802870 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.803471 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.804169 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.805228 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.805716 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.821104 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.835200 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.841808 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.842021 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.842082 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:29:32.842063174 +0000 UTC m=+21.780041788 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.842145 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.842273 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:32.842266249 +0000 UTC m=+21.780244863 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.849174 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.859256 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.871077 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.880252 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.880323 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2863831e56d1498fd2e265d09660b0fe4044551b084e0e3679334124edd8e7b4"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.888597 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.888973 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.889028 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dc81aa1b6cd8b46fdbd9e11973bb7638eaaef74f5ea784b00d2f95f5f69cdf1c"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.890551 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"90fee97f243095421571e96115f38891e8839d960a27daf6575bef405fc8b232"}
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.895879 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.906599 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.917212 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.928356 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.941079 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.943810 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.943957 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.944082 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944204 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944330 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:32.94429672 +0000 UTC m=+21.882275374 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944369 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944434 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944461 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944566 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:32.944532726 +0000 UTC m=+21.882511370 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944709 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944731 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944748 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 05:29:31 crc kubenswrapper[4661]: E1001 05:29:31.944793 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:32.944779203 +0000 UTC m=+21.882757847 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.953261 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.973584 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:31 crc kubenswrapper[4661]: I1001 05:29:31.986467 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.001205 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.016926 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.046691 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.062229 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.084623 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.105254 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.121588 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.134373 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.148922 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\"
:\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.159032 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.756152 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.756294 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.852431 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.852506 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.852599 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.852658 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:29:34.852607414 +0000 UTC m=+23.790586028 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.852706 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:34.852691416 +0000 UTC m=+23.790670030 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.868848 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-f8vtz"] Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.869152 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-slmf7"] Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.869176 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.869706 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:32 crc kubenswrapper[4661]: W1001 05:29:32.874437 4661 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.874478 4661 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 05:29:32 crc kubenswrapper[4661]: W1001 05:29:32.874509 4661 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.874551 4661 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 05:29:32 crc kubenswrapper[4661]: W1001 05:29:32.874680 4661 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.874705 4661 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 
05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.880986 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.881096 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.881199 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.881321 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.895874 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.908055 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.917286 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.932739 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.943417 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953509 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953549 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4783f8c4-bc93-4f21-b88f-62167f7ec68b-hosts-file\") pod 
\"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953568 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmx9v\" (UniqueName: \"kubernetes.io/projected/4783f8c4-bc93-4f21-b88f-62167f7ec68b-kube-api-access-xmx9v\") pod \"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953593 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953609 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-host\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953624 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-serviceca\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953655 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.953672 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjvpr\" (UniqueName: \"kubernetes.io/projected/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-kube-api-access-qjvpr\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.953793 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.953808 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.953819 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.953852 4661 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:34.953840502 +0000 UTC m=+23.891819106 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954146 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954162 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954169 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954190 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:34.954183761 +0000 UTC m=+23.892162375 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954236 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: E1001 05:29:32.954259 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:34.954252553 +0000 UTC m=+23.892231167 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.961693 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578b
c18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.979316 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:32 crc kubenswrapper[4661]: I1001 05:29:32.992421 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.002453 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.011277 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.022101 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.032532 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.042374 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054783 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmx9v\" (UniqueName: \"kubernetes.io/projected/4783f8c4-bc93-4f21-b88f-62167f7ec68b-kube-api-access-xmx9v\") pod \"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054835 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-host\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054877 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-serviceca\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054902 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjvpr\" (UniqueName: \"kubernetes.io/projected/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-kube-api-access-qjvpr\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054926 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4783f8c4-bc93-4f21-b88f-62167f7ec68b-hosts-file\") pod \"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " 
pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054957 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-host\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.054987 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4783f8c4-bc93-4f21-b88f-62167f7ec68b-hosts-file\") pod \"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.056131 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.056982 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-serviceca\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.079409 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjvpr\" (UniqueName: \"kubernetes.io/projected/584b2672-fbcc-4c9c-9ead-fdf45d9d1fff-kube-api-access-qjvpr\") pod \"node-ca-slmf7\" (UID: \"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\") " pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.085463 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.105466 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b16
9f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.121089 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.143468 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.174313 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.193384 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-slmf7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.198829 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: W1001 05:29:33.212514 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod584b2672_fbcc_4c9c_9ead_fdf45d9d1fff.slice/crio-64fd8da6bb5a56a83bd1856c5bce4f9f253e854dd13c45de569370b4f0ec826a WatchSource:0}: Error finding container 64fd8da6bb5a56a83bd1856c5bce4f9f253e854dd13c45de569370b4f0ec826a: Status 404 returned error can't find the container with id 64fd8da6bb5a56a83bd1856c5bce4f9f253e854dd13c45de569370b4f0ec826a Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.220297 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.243382 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-l96mp"] Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.243710 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.245701 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.245939 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.246119 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.246231 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.249461 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.259929 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.269722 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.289356 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.305767 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.319360 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.329731 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.346578 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362347 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-daemon-config\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362409 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh82c\" (UniqueName: \"kubernetes.io/projected/dc3b0e2f-f27e-4420-9323-ec45878c11a6-kube-api-access-dh82c\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362497 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-system-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362531 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cnibin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362556 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-multus-certs\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362625 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-socket-dir-parent\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362693 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-kubelet\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362719 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-hostroot\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362739 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-etc-kubernetes\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362760 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-netns\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362781 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-conf-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362892 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-os-release\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362942 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-k8s-cni-cncf-io\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362957 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-multus\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362976 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.362992 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cni-binary-copy\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.363006 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-bin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.363704 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.378972 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.392203 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.406004 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.418057 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464336 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-socket-dir-parent\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464383 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-kubelet\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464404 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-hostroot\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464422 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-etc-kubernetes\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464451 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-netns\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464468 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-conf-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464490 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464501 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-socket-dir-parent\") pod \"multus-l96mp\" (UID: 
\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464522 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-kubelet\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464544 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-netns\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464513 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-etc-kubernetes\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464574 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-conf-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464510 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-os-release\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464682 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-k8s-cni-cncf-io\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464692 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464708 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-multus\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464733 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cni-binary-copy\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464742 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-k8s-cni-cncf-io\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464756 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-bin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464785 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-bin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464788 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-os-release\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464808 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-var-lib-cni-multus\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464813 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-system-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464765 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-hostroot\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464909 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cnibin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464912 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-system-cni-dir\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464936 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-daemon-config\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.464944 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cnibin\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.465010 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh82c\" (UniqueName: \"kubernetes.io/projected/dc3b0e2f-f27e-4420-9323-ec45878c11a6-kube-api-access-dh82c\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.465056 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-multus-certs\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.465117 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/dc3b0e2f-f27e-4420-9323-ec45878c11a6-host-run-multus-certs\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.465601 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-cni-binary-copy\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.465649 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/dc3b0e2f-f27e-4420-9323-ec45878c11a6-multus-daemon-config\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.530990 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh82c\" (UniqueName: \"kubernetes.io/projected/dc3b0e2f-f27e-4420-9323-ec45878c11a6-kube-api-access-dh82c\") pod \"multus-l96mp\" (UID: \"dc3b0e2f-f27e-4420-9323-ec45878c11a6\") " pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.557224 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-l96mp" Oct 01 05:29:33 crc kubenswrapper[4661]: W1001 05:29:33.568945 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc3b0e2f_f27e_4420_9323_ec45878c11a6.slice/crio-984bce6456cb0dd80c74797afcfd773ce5a884e67b05cd03b0342c60bf75b4ec WatchSource:0}: Error finding container 984bce6456cb0dd80c74797afcfd773ce5a884e67b05cd03b0342c60bf75b4ec: Status 404 returned error can't find the container with id 984bce6456cb0dd80c74797afcfd773ce5a884e67b05cd03b0342c60bf75b4ec Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.635316 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-cqptt"] Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.635900 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.637441 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.638175 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.639977 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-wp2wh"] Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.640432 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.642000 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.642575 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.642613 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.643568 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.643940 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.646888 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.654255 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fj7kz"] Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.655201 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.657303 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.657460 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.657538 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.657716 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.657990 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.658555 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.660132 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.674615 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.694237 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.712049 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.737875 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.756580 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:33 crc kubenswrapper[4661]: E1001 05:29:33.756710 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.756592 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:33 crc kubenswrapper[4661]: E1001 05:29:33.756848 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766601 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766654 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766674 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-cnibin\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766690 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766709 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766722 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766737 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766751 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766766 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqhtk\" (UniqueName: \"kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766780 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-binary-copy\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766795 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7584c4bc-4202-487e-a2b4-4319f428a792-rootfs\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766819 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ss4n\" (UniqueName: \"kubernetes.io/projected/b037fd65-42ce-46b5-991d-d643006e1acf-kube-api-access-7ss4n\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766833 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766848 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7584c4bc-4202-487e-a2b4-4319f428a792-proxy-tls\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 
crc kubenswrapper[4661]: I1001 05:29:33.766862 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766877 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7584c4bc-4202-487e-a2b4-4319f428a792-mcd-auth-proxy-config\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766897 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-os-release\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766912 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766925 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766940 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766969 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.766986 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-system-cni-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767002 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767016 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd8c9\" (UniqueName: \"kubernetes.io/projected/7584c4bc-4202-487e-a2b4-4319f428a792-kube-api-access-bd8c9\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767031 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767045 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767061 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767101 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767117 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767132 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.767147 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 
05:29:33.773193 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.816233 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.834376 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.850930 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.864443 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.867691 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-binary-copy\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.867786 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7584c4bc-4202-487e-a2b4-4319f428a792-rootfs\") pod \"machine-config-daemon-wp2wh\" 
(UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.867880 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ss4n\" (UniqueName: \"kubernetes.io/projected/b037fd65-42ce-46b5-991d-d643006e1acf-kube-api-access-7ss4n\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.867960 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.867885 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7584c4bc-4202-487e-a2b4-4319f428a792-rootfs\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868033 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7584c4bc-4202-487e-a2b4-4319f428a792-proxy-tls\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868110 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868143 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7584c4bc-4202-487e-a2b4-4319f428a792-mcd-auth-proxy-config\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868163 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868189 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-os-release\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868204 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868219 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868257 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868275 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868291 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-system-cni-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868306 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868321 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd8c9\" (UniqueName: \"kubernetes.io/projected/7584c4bc-4202-487e-a2b4-4319f428a792-kube-api-access-bd8c9\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868335 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868349 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868375 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" 
(UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868390 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868408 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868424 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868449 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868464 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868482 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-cnibin\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868498 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868516 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868535 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868551 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868551 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868649 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-os-release\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868676 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868902 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.868566 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869000 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqhtk\" (UniqueName: \"kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869090 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869154 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn\") pod \"ovnkube-node-fj7kz\" (UID: 
\"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869158 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869181 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869210 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869214 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869233 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869249 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869256 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-cnibin\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869282 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869284 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc 
kubenswrapper[4661]: I1001 05:29:33.868509 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-binary-copy\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869301 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869320 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869335 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869356 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869366 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869381 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-system-cni-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869444 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b037fd65-42ce-46b5-991d-d643006e1acf-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869451 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7584c4bc-4202-487e-a2b4-4319f428a792-mcd-auth-proxy-config\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.869602 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b037fd65-42ce-46b5-991d-d643006e1acf-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.871385 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.872562 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7584c4bc-4202-487e-a2b4-4319f428a792-proxy-tls\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.873156 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.875519 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmx9v\" (UniqueName: \"kubernetes.io/projected/4783f8c4-bc93-4f21-b88f-62167f7ec68b-kube-api-access-xmx9v\") pod \"node-resolver-f8vtz\" (UID: \"4783f8c4-bc93-4f21-b88f-62167f7ec68b\") " pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.878314 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.881297 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ss4n\" (UniqueName: \"kubernetes.io/projected/b037fd65-42ce-46b5-991d-d643006e1acf-kube-api-access-7ss4n\") pod \"multus-additional-cni-plugins-cqptt\" (UID: \"b037fd65-42ce-46b5-991d-d643006e1acf\") " pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.882745 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd8c9\" (UniqueName: \"kubernetes.io/projected/7584c4bc-4202-487e-a2b4-4319f428a792-kube-api-access-bd8c9\") pod \"machine-config-daemon-wp2wh\" (UID: \"7584c4bc-4202-487e-a2b4-4319f428a792\") " pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.884327 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqhtk\" (UniqueName: \"kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk\") pod \"ovnkube-node-fj7kz\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.890303 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.894705 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerStarted","Data":"984bce6456cb0dd80c74797afcfd773ce5a884e67b05cd03b0342c60bf75b4ec"} Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.895880 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-slmf7" event={"ID":"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff","Type":"ContainerStarted","Data":"2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26"} Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.895904 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-slmf7" event={"ID":"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff","Type":"ContainerStarted","Data":"64fd8da6bb5a56a83bd1856c5bce4f9f253e854dd13c45de569370b4f0ec826a"} Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.899970 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.908859 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.917596 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.920299 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.936072 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.949805 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.960896 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.962043 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cqptt" Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.973276 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:33 crc kubenswrapper[4661]: W1001 05:29:33.973363 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb037fd65_42ce_46b5_991d_d643006e1acf.slice/crio-f4ca789dac09d471f10fa4dacdf690054988c9ab8fd53e83aeaddb2742ff9983 WatchSource:0}: Error finding container f4ca789dac09d471f10fa4dacdf690054988c9ab8fd53e83aeaddb2742ff9983: Status 404 returned error can't find the container with id f4ca789dac09d471f10fa4dacdf690054988c9ab8fd53e83aeaddb2742ff9983 Oct 01 05:29:33 crc kubenswrapper[4661]: I1001 05:29:33.988558 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.006687 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.022135 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.034708 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.039266 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.044920 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:34 crc kubenswrapper[4661]: W1001 05:29:34.053134 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7584c4bc_4202_487e_a2b4_4319f428a792.slice/crio-7036d4fac8913adf3d4e6ce60975a1449a43324d5742948022f58eb62c4a3af3 WatchSource:0}: Error finding container 7036d4fac8913adf3d4e6ce60975a1449a43324d5742948022f58eb62c4a3af3: Status 404 returned error can't find the container with id 7036d4fac8913adf3d4e6ce60975a1449a43324d5742948022f58eb62c4a3af3 Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.053132 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.124072 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-f8vtz" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.133356 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 
crc kubenswrapper[4661]: I1001 05:29:34.152036 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.184495 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.216277 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.253130 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.756320 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.756454 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.878899 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.879087 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:29:38.879027936 +0000 UTC m=+27.817006590 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.879146 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.879265 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.879343 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:38.879327653 +0000 UTC m=+27.817306307 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.905543 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.908712 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerStarted","Data":"caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.911938 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-f8vtz" event={"ID":"4783f8c4-bc93-4f21-b88f-62167f7ec68b","Type":"ContainerStarted","Data":"5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.911971 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-f8vtz" event={"ID":"4783f8c4-bc93-4f21-b88f-62167f7ec68b","Type":"ContainerStarted","Data":"ce5d86ce949b6fd76fb1e0bb0b0bd3544b0f3445851465ac7df999bdc2b6c542"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.914000 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" exitCode=0 Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.914051 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" 
event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.914067 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"2b30a8b4664ce0f91994e2edb6cf7edd82ace404222ec65878b7b332991044bb"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.919299 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.919341 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.919352 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"7036d4fac8913adf3d4e6ce60975a1449a43324d5742948022f58eb62c4a3af3"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.921815 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerStarted","Data":"e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.921894 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerStarted","Data":"f4ca789dac09d471f10fa4dacdf690054988c9ab8fd53e83aeaddb2742ff9983"} Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.924799 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.940989 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.964509 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.978881 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:34Z is after 
2025-08-24T17:21:41Z" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.980290 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.980496 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:34 crc kubenswrapper[4661]: I1001 05:29:34.980554 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.980808 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.980834 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.980848 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.980897 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:38.980881992 +0000 UTC m=+27.918860616 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.981800 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.981910 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-01 05:29:38.981880588 +0000 UTC m=+27.919859242 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.982107 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.982132 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.982146 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:34 crc kubenswrapper[4661]: E1001 05:29:34.982180 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:38.982169936 +0000 UTC m=+27.920148560 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.005420 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: 
I1001 05:29:35.042742 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.062402 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.083506 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.097878 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.112132 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.125899 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.145304 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.174689 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.191871 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.207759 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.246524 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.256310 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.270751 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.282713 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"p
odIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.320874 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z 
is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.349925 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.362754 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.380451 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.391658 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.413561 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.428577 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.440834 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.455831 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.472675 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.489260 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.756022 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.756022 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:35 crc kubenswrapper[4661]: E1001 05:29:35.756179 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:35 crc kubenswrapper[4661]: E1001 05:29:35.756317 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.929018 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.929099 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.929121 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.930835 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90" exitCode=0 Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.930910 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90"} Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.957376 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.979678 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:35 crc kubenswrapper[4661]: I1001 05:29:35.993233 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.006240 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.022834 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.041019 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.053014 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.070339 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.082379 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.093987 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.104285 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.113135 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.132501 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.143767 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c
915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.162286 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z 
is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.756289 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:36 crc kubenswrapper[4661]: E1001 05:29:36.756468 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.939172 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.939237 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.942090 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerStarted","Data":"f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247"} Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.977150 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:36 crc kubenswrapper[4661]: I1001 05:29:36.996440 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:36Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.008342 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.017976 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.030335 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.041880 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.059587 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.071325 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.089964 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.104228 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.117241 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.127667 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.147387 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.160843 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.179453 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z 
is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.612625 4661 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.614233 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.614283 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.614302 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.614420 4661 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621042 4661 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621121 4661 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621916 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621939 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621947 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621960 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.621986 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.641842 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.649609 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.649665 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.649678 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.649710 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.649721 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.661934 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.664974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.665001 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.665013 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.665031 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.665043 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.682004 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.686738 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.686870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.686944 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.687039 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.687111 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.703037 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.706985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.707066 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.707078 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.707099 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.707111 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.719298 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.719463 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.721192 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.721233 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.721244 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.721263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.721278 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.756724 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.756750 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.756856 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:37 crc kubenswrapper[4661]: E1001 05:29:37.756995 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.824159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.824196 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.824209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.824224 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.824235 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.839774 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.845345 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.853114 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.873282 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.891142 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.909010 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.926617 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.926686 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.926696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.926711 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.926720 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:37Z","lastTransitionTime":"2025-10-01T05:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.927222 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.943829 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.949914 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.965407 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:37 crc kubenswrapper[4661]: I1001 05:29:37.981896 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:37Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.065490 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.077304 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.077355 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.077364 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.077379 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.077388 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.078203 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.095498 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.109404 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.122311 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.138693 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.159369 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.176367 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.179089 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.179117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.179127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.179144 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.179156 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.194110 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.208311 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 
05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.232681 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.245684 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.269533 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.283137 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.283183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.283194 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.283211 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.283224 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.303716 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314
f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.318498 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.329795 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.345955 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.361944 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.387877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.387924 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.387956 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.387974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.387987 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.393651 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.413028 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.432624 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.444690 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.491361 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.491434 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.491460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.491490 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.491513 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.595382 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.595447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.595471 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.595506 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.595527 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.699509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.699954 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.700096 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.700249 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.700371 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.756470 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:38 crc kubenswrapper[4661]: E1001 05:29:38.756697 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.804450 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.804856 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.804875 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.804901 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.804919 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.907918 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.907964 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.907981 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.908004 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.908023 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:38Z","lastTransitionTime":"2025-10-01T05:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.947347 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:38 crc kubenswrapper[4661]: E1001 05:29:38.947539 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:29:46.94750109 +0000 UTC m=+35.885479744 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.947614 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:38 crc kubenswrapper[4661]: E1001 05:29:38.947756 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:38 crc kubenswrapper[4661]: E1001 05:29:38.947843 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:46.947817359 +0000 UTC m=+35.885796003 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.957529 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247" exitCode=0 Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.957586 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247"} Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.973705 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 
2025-08-24T17:21:41Z" Oct 01 05:29:38 crc kubenswrapper[4661]: I1001 05:29:38.994614 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:38Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.010460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.010523 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.010541 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.010567 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.010587 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.016548 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.038353 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.048776 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.048847 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.048947 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049066 4661 secret.go:188] Couldn't get secret 
openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049156 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049184 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:47.04915467 +0000 UTC m=+35.987133324 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049189 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049218 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049295 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:47.049265853 +0000 UTC m=+35.987244497 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049352 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049371 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049386 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.049442 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:47.049427847 +0000 UTC m=+35.987406491 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.056263 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.079234 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.102657 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.117466 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.117514 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.117531 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.117554 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.117572 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.125344 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.147293 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.167096 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.187931 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.220693 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.220729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.220742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.220762 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.220776 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.222424 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314
f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.261242 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d
6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.278700 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.295994 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:39Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.323180 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.323231 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.323249 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.323271 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.323288 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.426436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.426492 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.426509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.426533 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.426547 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.533323 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.533375 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.533392 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.533416 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.533433 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.636576 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.636665 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.636685 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.636708 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.636725 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.739788 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.739845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.739862 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.739886 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.739904 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.756870 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.757002 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.757421 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:29:39 crc kubenswrapper[4661]: E1001 05:29:39.757581 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.843374 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.843440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.843460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.843485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.843504 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.946912 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.947013 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.947035 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.947059 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:39 crc kubenswrapper[4661]: I1001 05:29:39.947076 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:39Z","lastTransitionTime":"2025-10-01T05:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.049786 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.049850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.049868 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.049894 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.049912 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.152716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.152769 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.152786 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.152812 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.152829 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.256033 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.256095 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.256118 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.256150 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.256174 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.359521 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.359578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.359596 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.359623 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.359675 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.462417 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.462497 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.462519 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.462549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.462572 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.565298 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.565365 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.565385 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.565412 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.565431 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.668396 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.668443 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.668456 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.668475 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.668486 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.756792 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:29:40 crc kubenswrapper[4661]: E1001 05:29:40.757002 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.770460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.770505 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.770517 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.770533 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.770544 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.873654 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.873707 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.873721 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.873742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.873756 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.993762 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.993835 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.993864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.993902 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:29:40 crc kubenswrapper[4661]: I1001 05:29:40.993921 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:40Z","lastTransitionTime":"2025-10-01T05:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:40.999962 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"}
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.003408 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888" exitCode=0
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.003471 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888"}
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.017698 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.054337 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.072952 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.091421 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.097584 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.097658 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.097676 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.097698 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.097714 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.109489 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.125700 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.143342 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z"
Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.155723 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.203569 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.203658 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.203679 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.203703 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.203719 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.241831 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.265377 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.285616 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.297451 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.305929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.305958 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.305970 4661 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.305984 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.305993 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.310816 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.408042 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.408083 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.408096 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.408115 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.408128 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.511979 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.512055 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.512074 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.512103 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.512126 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.615610 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.615865 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.615912 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.615941 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.615960 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.719793 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.719887 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.719899 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.719923 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.719934 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.756325 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.756440 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:41 crc kubenswrapper[4661]: E1001 05:29:41.756511 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:41 crc kubenswrapper[4661]: E1001 05:29:41.756902 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.784860 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c
7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.807355 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.822078 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.822119 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.822132 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.822155 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.822169 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.832507 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.849045 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.865777 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.892604 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.912853 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.925065 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.925101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.925112 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.925133 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.925149 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:41Z","lastTransitionTime":"2025-10-01T05:29:41Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.927177 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.948469 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.964552 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:41 crc kubenswrapper[4661]: I1001 05:29:41.987118 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.009911 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e" exitCode=0 Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.009999 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.015548 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.027387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.027419 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.027431 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.027447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.027459 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.033375 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.054223 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.069962 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.100100 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.121448 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.131723 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.132081 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.132281 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.132512 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.132741 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.140679 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.166221 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.189941 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.218377 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.236179 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.236212 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.236223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.236240 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.236252 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.237995 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.254281 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.267922 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.281661 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.295153 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.317315 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.338872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 
05:29:42.338983 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.339010 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.339043 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.339069 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.339902 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.363393 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.383132 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.443549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.443611 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.443660 4661 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.443695 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.443725 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.546952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.546984 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.546994 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.547010 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.547021 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.650340 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.650570 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.650577 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.650589 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.650597 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.753867 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.753922 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.753949 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.753975 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.753991 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.756204 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:42 crc kubenswrapper[4661]: E1001 05:29:42.756357 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.864498 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.864900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.865393 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.865615 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.865848 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.969097 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.969496 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.969672 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.969794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:42 crc kubenswrapper[4661]: I1001 05:29:42.970091 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:42Z","lastTransitionTime":"2025-10-01T05:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.021304 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerStarted","Data":"d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.037092 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.052141 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.078481 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.080529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.080563 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.080575 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.080595 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.080610 4661 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.093007 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.118295 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z 
is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.153759 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.174318 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.183686 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.183886 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.183966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.184065 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.184169 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.189528 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.208765 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.228494 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.250726 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.272726 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.286820 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.286860 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.286874 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.286904 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.286916 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.293071 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.310258 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.325256 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:43Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.390265 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.390329 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.390338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.390359 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.390374 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.492712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.493171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.493179 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.493191 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.493200 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.597724 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.597925 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.598050 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.598183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.598269 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.701218 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.701290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.701325 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.701353 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.701374 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.756171 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.756344 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:43 crc kubenswrapper[4661]: E1001 05:29:43.756547 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:43 crc kubenswrapper[4661]: E1001 05:29:43.756739 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.804191 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.804248 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.804265 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.804291 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.804309 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.908073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.908139 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.908158 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.908185 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:43 crc kubenswrapper[4661]: I1001 05:29:43.908206 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:43Z","lastTransitionTime":"2025-10-01T05:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.012035 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.012135 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.012159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.012190 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.012210 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.039358 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.039924 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.039964 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.045566 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d" exitCode=0 Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.045665 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.062230 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.085628 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.099255 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.100314 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.108680 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.115299 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.115338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.115350 4661 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.115370 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.115384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.131024 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.162158 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.186130 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.200598 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.219662 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.219705 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.219716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.219732 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.219744 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.225252 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.248023 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.276494 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd18642
54b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.292992 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.312200 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.321872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.321914 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.321924 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.321940 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.321951 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.328931 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.354801 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.368733 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.383312 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.403445 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425408 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425901 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425937 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425951 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425970 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.425984 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.445301 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.468515 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.487650 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.502095 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.516561 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.528722 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.528781 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.528802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.528828 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.528847 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.532839 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.552737 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.570914 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.608607 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.633775 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.634073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.634233 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc 
kubenswrapper[4661]: I1001 05:29:44.634417 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.634579 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.640061 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e668515054
7180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.657979 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.672388 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:44Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.737668 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.737945 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.738114 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.738252 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.738368 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.755956 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:44 crc kubenswrapper[4661]: E1001 05:29:44.756138 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.841116 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.841182 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.841196 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.841214 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.841228 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.950785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.950874 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.950894 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.950917 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:44 crc kubenswrapper[4661]: I1001 05:29:44.950934 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:44Z","lastTransitionTime":"2025-10-01T05:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.058232 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.058288 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.058300 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.058320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.058331 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.062748 4661 generic.go:334] "Generic (PLEG): container finished" podID="b037fd65-42ce-46b5-991d-d643006e1acf" containerID="76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782" exitCode=0 Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.062821 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerDied","Data":"76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.062984 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.100256 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.125613 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.148515 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167118 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167482 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167510 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167525 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167546 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.167562 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.189822 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.213118 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.229989 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.249817 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.265911 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.270109 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.270162 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.270181 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.270205 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.270223 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.281176 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.296787 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.309119 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.326595 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d35
34b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.345870 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.373724 4661 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.373779 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.373799 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.373841 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.373861 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.374821 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd18642
54b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:45Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.476278 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.476320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.476331 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.476348 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.476362 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.580842 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.581495 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.581535 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.581572 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.581592 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.684308 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.684696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.684897 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.684996 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.685099 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.756139 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.756237 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:45 crc kubenswrapper[4661]: E1001 05:29:45.756262 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:45 crc kubenswrapper[4661]: E1001 05:29:45.756420 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.787853 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.787911 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.787924 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.787945 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.787962 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.890984 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.891057 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.891076 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.891103 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.891122 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.993798 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.993863 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.993882 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.993908 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:45 crc kubenswrapper[4661]: I1001 05:29:45.993928 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:45Z","lastTransitionTime":"2025-10-01T05:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.072164 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.073396 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" event={"ID":"b037fd65-42ce-46b5-991d-d643006e1acf","Type":"ContainerStarted","Data":"b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.097120 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.097184 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.097204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.097232 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.097384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.109784 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.129981 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.151436 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.170756 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.199740 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.201541 
4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.201579 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.201596 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.201617 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.201660 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.216253 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.243491 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.269305 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.284247 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.295846 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.304369 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.304399 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.304407 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.304422 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.304431 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.307024 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.325342 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.339980 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.352111 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.370861 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.406728 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.406780 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc 
kubenswrapper[4661]: I1001 05:29:46.406794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.406888 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.406931 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.482760 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk"] Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.483470 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.486390 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.488591 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.509742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.509795 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.509814 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.509840 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.509861 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.528103 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.548716 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.564967 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.565009 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh556\" (UniqueName: \"kubernetes.io/projected/56b04919-b144-4049-8ccd-e6de8aaa48a3-kube-api-access-xh556\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.565031 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.565204 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.572174 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.587213 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.606844 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.611736 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.611774 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.611785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.611802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.611816 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.621242 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.635215 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.647981 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.661549 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.667020 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh556\" (UniqueName: \"kubernetes.io/projected/56b04919-b144-4049-8ccd-e6de8aaa48a3-kube-api-access-xh556\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.667095 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.667159 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.667271 4661 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.667978 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.668238 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/56b04919-b144-4049-8ccd-e6de8aaa48a3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.674569 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/56b04919-b144-4049-8ccd-e6de8aaa48a3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.687313 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.687369 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh556\" (UniqueName: \"kubernetes.io/projected/56b04919-b144-4049-8ccd-e6de8aaa48a3-kube-api-access-xh556\") pod \"ovnkube-control-plane-749d76644c-mlbtk\" (UID: \"56b04919-b144-4049-8ccd-e6de8aaa48a3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.701874 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.714241 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.714286 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.714299 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: 
I1001 05:29:46.714320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.714333 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.716683 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.738871 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1
688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.753187 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.756278 4661 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:46 crc kubenswrapper[4661]: E1001 05:29:46.756442 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.774522 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd18642
54b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.794529 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:46Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.799850 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.816956 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.817017 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.817037 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.817063 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.817080 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.920150 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.920426 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.920446 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.920467 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.920480 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:46Z","lastTransitionTime":"2025-10-01T05:29:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.972563 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:29:46 crc kubenswrapper[4661]: I1001 05:29:46.972788 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:46 crc kubenswrapper[4661]: E1001 05:29:46.972838 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:30:02.972781259 +0000 UTC m=+51.910759903 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:29:46 crc kubenswrapper[4661]: E1001 05:29:46.972904 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:46 crc kubenswrapper[4661]: E1001 05:29:46.972996 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:02.972960363 +0000 UTC m=+51.910939007 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.023172 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.023234 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.023251 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.023277 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.023294 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.073391 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.073451 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.073490 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.073604 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.073624 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.073658 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.073702 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:03.073686579 +0000 UTC m=+52.011665193 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074198 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074216 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074227 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074256 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:03.074246984 +0000 UTC m=+52.012225608 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074303 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.074330 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:03.074320476 +0000 UTC m=+52.012299090 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.078720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" event={"ID":"56b04919-b144-4049-8ccd-e6de8aaa48a3","Type":"ContainerStarted","Data":"94a9d9d227224242f462307450b3c7b9b24b8cebb251f6a3b42ab86cf54e7f21"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.081351 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/0.log" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.086403 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87" exitCode=1 Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.086507 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.087319 4661 scope.go:117] "RemoveContainer" containerID="49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.110934 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.126613 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.126681 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.126699 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.126724 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.126742 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.127566 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.147106 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.163191 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.182242 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.198717 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.220352 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.232000 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 
05:29:47.232056 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.232079 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.232108 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.232131 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.243758 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.263803 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.286695 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.319300 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"ler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:46.988723 5881 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 05:29:46.988752 5881 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:29:46.988767 5881 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:29:46.988796 5881 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:29:46.988844 5881 factory.go:656] Stopping watch factory\\\\nI1001 05:29:46.988870 5881 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:29:46.988885 5881 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:46.988899 5881 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:29:46.988912 5881 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 05:29:46.988930 5881 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:46.989288 5881 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989425 5881 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989719 5881 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.334968 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.335036 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.335054 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.335079 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.335091 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.338693 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.354004 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.364829 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.378899 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.392402 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.437000 4661 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.437045 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.437057 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.437077 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.437091 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.540538 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.540585 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.540601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.540624 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.540662 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.643164 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.643205 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.643214 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.643227 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.643237 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.747562 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.747598 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.747608 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.747623 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.747648 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.756049 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.756101 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.756177 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.756267 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.778845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.778908 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.778927 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.778955 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.778972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.806159 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.810979 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.811038 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.811051 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.811078 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.811093 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.831250 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.838419 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.838480 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.838494 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.838522 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.838554 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.855404 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.859870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.859921 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.859939 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.859959 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.859972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.884796 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.889087 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.889135 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.889149 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.889170 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.889184 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.904743 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:47Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:47 crc kubenswrapper[4661]: E1001 05:29:47.904892 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.906196 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.906227 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.906239 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.906255 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:47 crc kubenswrapper[4661]: I1001 05:29:47.906265 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:47Z","lastTransitionTime":"2025-10-01T05:29:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.009371 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.009457 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.009487 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.009529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.009556 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.028158 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rsrzg"] Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.028895 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.028993 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.047954 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.064697 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.082905 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.083015 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l4fc\" (UniqueName: \"kubernetes.io/projected/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-kube-api-access-4l4fc\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.091084 4661 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.095124 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/0.log" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.100085 
4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.100268 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.103927 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" event={"ID":"56b04919-b144-4049-8ccd-e6de8aaa48a3","Type":"ContainerStarted","Data":"8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.103979 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" event={"ID":"56b04919-b144-4049-8ccd-e6de8aaa48a3","Type":"ContainerStarted","Data":"6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.106671 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96
e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.112710 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.112762 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.112775 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.112798 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.112811 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.141813 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"ler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:46.988723 5881 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 05:29:46.988752 5881 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:29:46.988767 5881 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:29:46.988796 5881 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:29:46.988844 5881 factory.go:656] Stopping watch factory\\\\nI1001 05:29:46.988870 5881 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:29:46.988885 5881 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:46.988899 5881 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:29:46.988912 5881 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 05:29:46.988930 5881 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:46.989288 5881 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989425 5881 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989719 5881 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.162600 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.184673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l4fc\" (UniqueName: \"kubernetes.io/projected/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-kube-api-access-4l4fc\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.184928 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.186885 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.186969 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. 
No retries permitted until 2025-10-01 05:29:48.686939797 +0000 UTC m=+37.624918451 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.191413 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.214560 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l4fc\" (UniqueName: \"kubernetes.io/projected/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-kube-api-access-4l4fc\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.215258 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.216109 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.216159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.216177 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.216206 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.216228 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.238931 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.259865 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.275777 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.294909 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.314231 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.320353 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.320656 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.320754 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.320888 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.321006 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.333067 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.352801 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.370803 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.385943 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.401445 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.416566 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.423748 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.423802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.423821 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.423845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.423864 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.432948 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.449438 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.462408 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.480670 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.496322 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.508371 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.527283 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.527340 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.527358 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.527386 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.527405 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.534969 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z"
Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.553203 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z"
Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.582708 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"ler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:46.988723 5881 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 05:29:46.988752 5881 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:29:46.988767 5881 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:29:46.988796 5881 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:29:46.988844 5881 factory.go:656] Stopping watch factory\\\\nI1001 05:29:46.988870 5881 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:29:46.988885 5881 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:46.988899 5881 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:29:46.988912 5881 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 05:29:46.988930 5881 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:46.989288 5881 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989425 5881 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989719 5881 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.601892 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 
05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.630587 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.630646 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.630657 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.630674 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.630688 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.638241 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.660431 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.679118 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.690322 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.690462 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.690526 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:49.690509141 +0000 UTC m=+38.628487765 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.695434 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 
2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.709700 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:48Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.734132 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.734190 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.734211 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.734239 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.734260 4661 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.756467 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:48 crc kubenswrapper[4661]: E1001 05:29:48.756682 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.837260 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.837305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.837317 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.837333 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.837344 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.940776 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.940842 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.940864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.940892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:48 crc kubenswrapper[4661]: I1001 05:29:48.940910 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:48Z","lastTransitionTime":"2025-10-01T05:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.043274 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.043320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.043331 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.043344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.043355 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.109211 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/1.log" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.110363 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/0.log" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.113473 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf" exitCode=1 Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.113595 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.113739 4661 scope.go:117] "RemoveContainer" containerID="49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.115917 4661 scope.go:117] "RemoveContainer" containerID="66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf" Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.116183 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.146793 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.148555 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.148603 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.148613 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.148648 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.148662 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.167615 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.185489 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.200521 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.219270 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.241200 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.251035 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.251093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.251115 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.251146 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.251171 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.259784 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.275436 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.299042 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.317269 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.338590 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.354384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.354442 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.354454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.354469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.354479 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.355783 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.369809 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.391443 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.406474 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.433649 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"ler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:46.988723 5881 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 05:29:46.988752 5881 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:29:46.988767 5881 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:29:46.988796 5881 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:29:46.988844 5881 factory.go:656] Stopping watch factory\\\\nI1001 05:29:46.988870 5881 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:29:46.988885 5881 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:46.988899 5881 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:29:46.988912 5881 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 05:29:46.988930 5881 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:46.989288 5881 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989425 5881 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989719 5881 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where 
column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.449666 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:49Z is after 2025-08-24T17:21:41Z" Oct 01 
05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.458011 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.458203 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.458274 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.458352 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.458423 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.561618 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.561701 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.561718 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.561741 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.561758 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.665800 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.665856 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.665877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.665902 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.665920 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.710466 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.710699 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.711165 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:51.711138901 +0000 UTC m=+40.649117515 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.756833 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.756962 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.757144 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.757355 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.757510 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:49 crc kubenswrapper[4661]: E1001 05:29:49.757851 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.768355 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.768414 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.768431 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.768455 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.768475 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.871890 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.872321 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.872384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.872460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.872531 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.975347 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.975393 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.975402 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.975421 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:49 crc kubenswrapper[4661]: I1001 05:29:49.975432 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:49Z","lastTransitionTime":"2025-10-01T05:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.077828 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.077901 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.077919 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.077942 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.077959 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.121514 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/1.log" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.180648 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.180696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.180709 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.180730 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.180745 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.283929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.283992 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.284002 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.284023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.284036 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.392313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.392415 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.392435 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.392463 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.392491 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.495731 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.495812 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.495831 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.495861 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.495878 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.600163 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.600217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.600234 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.600258 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.600277 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.703520 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.703590 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.703609 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.703662 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.703684 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.756597 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:50 crc kubenswrapper[4661]: E1001 05:29:50.756843 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.807601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.807675 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.807695 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.807720 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.807737 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.911518 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.911578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.911596 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.911620 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:50 crc kubenswrapper[4661]: I1001 05:29:50.911668 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:50Z","lastTransitionTime":"2025-10-01T05:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.015436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.015500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.015517 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.015542 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.015561 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.119168 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.119239 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.119266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.119296 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.119318 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.222622 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.222896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.222946 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.222975 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.222996 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.326029 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.326111 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.326137 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.326168 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.326191 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.429202 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.429266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.429287 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.429311 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.429328 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.532269 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.532315 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.532328 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.532345 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.532357 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.635496 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.635555 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.635574 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.635598 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.635613 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.739587 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.739709 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.739733 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.739762 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.739781 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.756043 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.756193 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.756452 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.756477 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:51 crc kubenswrapper[4661]: E1001 05:29:51.756477 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:51 crc kubenswrapper[4661]: E1001 05:29:51.756677 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:51 crc kubenswrapper[4661]: E1001 05:29:51.756770 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:51 crc kubenswrapper[4661]: E1001 05:29:51.756799 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:51 crc kubenswrapper[4661]: E1001 05:29:51.756899 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:29:55.756866311 +0000 UTC m=+44.694844995 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.787221 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5840
8f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.809100 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.827712 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.842898 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.842933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.842942 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.842960 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.842971 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.848770 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.867016 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.887081 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.908086 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.931143 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.952939 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.953511 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.953542 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.953579 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.953614 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:51Z","lastTransitionTime":"2025-10-01T05:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.959604 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:51 crc kubenswrapper[4661]: I1001 05:29:51.983015 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.002153 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.027675 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49ceaae4385aad2f35d1f5dc48e03067efd1864254b7452239c3242d2d424e87\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:47Z\\\",\\\"message\\\":\\\"ler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:46.988723 5881 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 05:29:46.988752 5881 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:29:46.988767 5881 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:29:46.988796 5881 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:29:46.988844 5881 factory.go:656] Stopping watch factory\\\\nI1001 05:29:46.988870 5881 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:29:46.988885 5881 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:46.988899 5881 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:29:46.988912 5881 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 05:29:46.988930 5881 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:46.989288 5881 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989425 5881 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:29:46.989719 5881 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.046580 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 
05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.067482 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.067530 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.067547 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.067571 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.067809 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.068524 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.082918 4661 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.107101 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.125884 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.171026 4661 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.171089 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.171106 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.171129 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.171147 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.274266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.274324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.274342 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.274366 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.274384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.377452 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.377527 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.377546 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.377574 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.377595 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.480886 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.480980 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.481005 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.481040 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.481059 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.585013 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.585073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.585091 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.585115 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.585133 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.688883 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.688947 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.688964 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.688992 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.689009 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.756854 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:52 crc kubenswrapper[4661]: E1001 05:29:52.757030 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.792552 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.792717 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.792745 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.792778 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.792802 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.896142 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.896204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.896221 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.896246 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:52 crc kubenswrapper[4661]: I1001 05:29:52.896263 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:52Z","lastTransitionTime":"2025-10-01T05:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.000442 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.000511 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.000529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.000555 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.000574 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.103541 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.103614 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.103682 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.103712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.103732 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.207822 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.207900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.207920 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.207946 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.207972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.310767 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.310845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.310864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.311315 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.311375 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.414802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.414874 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.414896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.414928 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.414949 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.519615 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.519722 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.519743 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.519775 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.519798 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.623114 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.623165 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.623177 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.623195 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.623213 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.726365 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.726405 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.726414 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.726428 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.726437 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.756299 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.756340 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.756375 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:53 crc kubenswrapper[4661]: E1001 05:29:53.756442 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:53 crc kubenswrapper[4661]: E1001 05:29:53.756505 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:53 crc kubenswrapper[4661]: E1001 05:29:53.756570 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.829768 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.829818 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.829838 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.829860 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.829880 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.933612 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.933707 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.933733 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.933764 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:53 crc kubenswrapper[4661]: I1001 05:29:53.933790 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:53Z","lastTransitionTime":"2025-10-01T05:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.036131 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.036195 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.036219 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.036249 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.036269 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.139867 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.139947 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.139969 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.139998 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.140018 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.243349 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.243428 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.243454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.243486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.243511 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.346783 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.346850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.346868 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.346894 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.346913 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.449785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.449835 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.449852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.449869 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.449883 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.552801 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.552860 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.552877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.552903 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.552921 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.656943 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.656995 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.657007 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.657030 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.657043 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.756614 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:54 crc kubenswrapper[4661]: E1001 05:29:54.756867 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.759700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.760022 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.760075 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.760100 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.760122 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.863826 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.863916 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.863935 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.863966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.863984 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.967151 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.967198 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.967209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.967226 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:54 crc kubenswrapper[4661]: I1001 05:29:54.967240 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:54Z","lastTransitionTime":"2025-10-01T05:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.070145 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.070188 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.070200 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.070216 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.070229 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.172803 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.172862 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.172881 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.172904 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.172921 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.276299 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.276394 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.276425 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.276455 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.276481 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.379199 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.379266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.379288 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.379318 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.379348 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.483091 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.483150 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.483167 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.483190 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.483207 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.586231 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.586287 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.586303 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.586334 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.586352 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.689286 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.689343 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.689360 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.689381 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.689398 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.756342 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.756389 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.756341 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:55 crc kubenswrapper[4661]: E1001 05:29:55.756508 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:55 crc kubenswrapper[4661]: E1001 05:29:55.756731 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:55 crc kubenswrapper[4661]: E1001 05:29:55.756972 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.792810 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.792871 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.792884 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.792913 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.792929 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.814680 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:55 crc kubenswrapper[4661]: E1001 05:29:55.815005 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:55 crc kubenswrapper[4661]: E1001 05:29:55.815200 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:03.815165502 +0000 UTC m=+52.753144136 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.897040 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.897104 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.897123 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.897149 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:55 crc kubenswrapper[4661]: I1001 05:29:55.897169 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:55Z","lastTransitionTime":"2025-10-01T05:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.000346 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.000400 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.000417 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.000439 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.000456 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.103852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.103916 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.103936 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.103965 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.103988 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.206663 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.206729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.206750 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.206774 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.206792 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.309719 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.309777 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.309794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.309819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.309836 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.413157 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.413223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.413239 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.413262 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.413280 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.516208 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.516272 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.516290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.516313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.516334 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.619415 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.619467 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.619483 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.619505 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.619521 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.722128 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.722193 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.722212 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.722236 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.722255 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.755866 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:56 crc kubenswrapper[4661]: E1001 05:29:56.756107 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.824799 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.824858 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.824875 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.824898 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.824918 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.927899 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.927959 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.927977 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.928002 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:56 crc kubenswrapper[4661]: I1001 05:29:56.928020 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:56Z","lastTransitionTime":"2025-10-01T05:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.030807 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.030852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.030870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.030892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.030910 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.133155 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.133221 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.133238 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.133264 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.133282 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.236512 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.236583 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.236595 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.236611 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.236653 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.339878 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.339933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.339945 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.339973 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.339989 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.442761 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.442826 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.442846 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.442870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.442889 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.545876 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.545946 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.545970 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.546003 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.546036 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.649381 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.649444 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.649464 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.649490 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.649508 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.752914 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.752968 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.752985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.753008 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.753026 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.756188 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.756286 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:57 crc kubenswrapper[4661]: E1001 05:29:57.756409 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.756474 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:57 crc kubenswrapper[4661]: E1001 05:29:57.756690 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:57 crc kubenswrapper[4661]: E1001 05:29:57.756897 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.858506 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.858586 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.858604 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.858627 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.858671 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.961920 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.961992 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.962056 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.962082 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:57 crc kubenswrapper[4661]: I1001 05:29:57.962099 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:57Z","lastTransitionTime":"2025-10-01T05:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.064932 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.064979 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.065003 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.065028 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.065044 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.165852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.165922 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.165940 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.165967 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.165986 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.188226 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:58Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.193216 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.193272 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.193294 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.193324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.193346 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.214713 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:58Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.221253 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.221330 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
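Every one of the failed patch attempts above (and the retries that follow) fails for the same reason: the API server forwards the node-status patch to the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and the TLS handshake is rejected because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the log's current time of 2025-10-01T05:29:58Z. A minimal Go sketch for confirming the certificate window from the node (the address is taken from the log; InsecureSkipVerify is required precisely because normal verification fails on the expired certificate):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// Dial the webhook endpoint named in the log and read its serving certificate.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("NotBefore:", cert.NotBefore)
	fmt.Println("NotAfter: ", cert.NotAfter) // per the log, 2025-08-24T17:21:41Z
}
```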
event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.221350 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.221373 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.221391 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.249232 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:58Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.254478 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.254578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
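The condition payload in the setters.go:603 entries corresponds to the Kubernetes core/v1 NodeCondition type. As a sketch, the same Ready=False condition constructed in Go (field values are copied from the log entry; the k8s.io/api and k8s.io/apimachinery modules and their conventional import aliases are assumed):

```go
package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Heartbeat/transition timestamps as they appear in the log entry.
	ts := metav1.NewTime(time.Date(2025, 10, 1, 5, 29, 58, 0, time.UTC))
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  ts,
		LastTransitionTime: ts,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	fmt.Printf("%+v\n", cond)
}
```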
event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.254597 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.254622 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.254679 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.273162 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:58Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.277872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.277944 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
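The pattern above is the kubelet's bounded retry loop: each failed patch is logged from kubelet_node_status.go:585 as "Error updating node status, will retry", and after the final attempt below the kubelet gives up with "update node status exceeds retry count". A minimal sketch of that control flow; the constant name nodeStatusUpdateRetry and its value of 5 mirror the upstream kubelet source, but treat the exact value as an assumption here:

```go
package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the upstream kubelet constant (assumed 5 here):
// the status patch is attempted this many times before the kubelet gives up.
const nodeStatusUpdateRetry = 5

func updateNodeStatus(patch func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patch(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	// Every attempt fails the same way the log shows: the admission webhook's
	// expired certificate makes the API server reject the patch.
	err := updateNodeStatus(func() error {
		return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
	})
	fmt.Println(err) // update node status exceeds retry count
}
```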
event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.277967 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.277991 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.278008 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.293571 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:58Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.293737 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.295669 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.295701 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.295714 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.295729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.295742 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.399341 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.399406 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.399423 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.399446 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.399462 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.501690 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.501738 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.501750 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.501766 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.501777 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.604463 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.604504 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.604513 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.604527 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.604538 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.707210 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.707274 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.707294 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.707318 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.707335 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.757189 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:29:58 crc kubenswrapper[4661]: E1001 05:29:58.757453 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.809818 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.809864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.809877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.809892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.809905 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.913415 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.913477 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.913495 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.913519 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:58 crc kubenswrapper[4661]: I1001 05:29:58.913539 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:58Z","lastTransitionTime":"2025-10-01T05:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.016546 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.016612 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.016655 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.016682 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.016711 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.120113 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.120152 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.120162 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.120179 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.120189 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.223544 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.223587 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.223603 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.223624 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.223676 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.327102 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.327179 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.327202 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.327231 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.327253 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.435173 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.435243 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.435263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.435291 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.435313 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.538378 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.538422 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.538439 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.538461 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.538479 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.616840 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.618214 4661 scope.go:117] "RemoveContainer" containerID="66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.633402 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.641274 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.641315 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.641333 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.641355 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.641373 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.655347 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.679867 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.706505 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.730720 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.749322 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.749389 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.749408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.749432 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.749459 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.755733 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.756915 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.757047 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:29:59 crc kubenswrapper[4661]: E1001 05:29:59.757112 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:29:59 crc kubenswrapper[4661]: E1001 05:29:59.757212 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.757769 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:29:59 crc kubenswrapper[4661]: E1001 05:29:59.757903 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.778476 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.796255 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.814932 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.832475 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.853966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.854187 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.854267 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.854329 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.854403 4661 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.855196 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.872073 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.901897 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 
ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.932576 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.952051 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.960953 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.961017 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.961033 4661 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.961056 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.961098 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:29:59Z","lastTransitionTime":"2025-10-01T05:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.973063 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:29:59 crc kubenswrapper[4661]: I1001 05:29:59.986998 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:29:59Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.063543 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.063588 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.063606 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.063666 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.063684 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.165819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.165867 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.165878 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.165897 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.165908 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.269011 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.269074 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.269093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.269117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.269135 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.372871 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.373229 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.373247 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.373273 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.373290 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.475976 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.476042 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.476061 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.476084 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.476101 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.579905 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.579970 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.579985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.580010 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.580029 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.683010 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.683079 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.683097 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.683121 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.683139 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.756494 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:00 crc kubenswrapper[4661]: E1001 05:30:00.756711 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.786330 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.786365 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.786374 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.786388 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.786401 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.889423 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.889475 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.889498 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.889527 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.889552 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.993141 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.993204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.993227 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.993255 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:00 crc kubenswrapper[4661]: I1001 05:30:00.993277 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:00Z","lastTransitionTime":"2025-10-01T05:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.096440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.096509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.096537 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.096570 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.096594 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.167518 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/1.log" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.171439 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.199864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.199910 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.199923 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.199942 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.199956 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.305580 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.305697 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.305723 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.305760 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.305785 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.409230 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.409280 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.409295 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.409320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.409338 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.512492 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.512547 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.512561 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.512580 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.512592 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.615359 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.615419 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.615433 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.615453 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.615465 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.718291 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.718337 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.718350 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.718368 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.718379 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.756721 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.756770 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.756796 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:01 crc kubenswrapper[4661]: E1001 05:30:01.756930 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:01 crc kubenswrapper[4661]: E1001 05:30:01.757514 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:01 crc kubenswrapper[4661]: E1001 05:30:01.757622 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.774733 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.791893 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.810819 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.820817 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.820877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc 
kubenswrapper[4661]: I1001 05:30:01.820897 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.820922 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.820939 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.833739 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.855910 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e90
74f0dbd548b5aceb2a7a32bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.870486 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.894887 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.910088 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.924698 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.924756 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.924776 4661 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.924802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.924821 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:01Z","lastTransitionTime":"2025-10-01T05:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.928399 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.943772 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.962575 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
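
[annotation] The err= payloads above are strategic-merge-patch JSON that has been quoted twice, once when the status manager formats the error and once when klog quotes the field, which is why every quote appears as \\\". A minimal Go sketch for recovering the raw patch; the uid is copied from the multus-l96mp entry below, and the single-rule replacement is an assumption that only this one escaping layer is present:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        // One escaped fragment copied from the log; each literal \\\" is the
        // doubly-quoted form of a single JSON double quote.
        escaped := `{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"}}`
        raw := strings.ReplaceAll(escaped, `\\\"`, `"`)
        fmt.Println(raw) // {"metadata":{"uid":"dc3b0e2f-f27e-4420-9323-ec45878c11a6"}}
    }
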
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:01 crc kubenswrapper[4661]: I1001 05:30:01.985038 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:01Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.021145 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
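
[annotation] The payload shape is the same in every entry: $setElementOrder/conditions pins the ordering of the conditions list, which strategic merge patch merges on its "type" key, while "conditions" itself carries only the entries that changed. A sketch constructing that shape (the uid is copied from the network-check-source entry below; this builds an equivalent patch body, not the kubelet's actual code path):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        patch := map[string]any{
            "metadata": map[string]any{"uid": "9d751cbb-f2e2-430d-9754-c882a5e924a5"},
            "status": map[string]any{
                // Desired ordering of the conditions list; strategic merge
                // patch merges list elements on their "type" key.
                "$setElementOrder/conditions": []map[string]string{
                    {"type": "PodReadyToStartContainers"}, {"type": "Initialized"},
                    {"type": "Ready"}, {"type": "ContainersReady"}, {"type": "PodScheduled"},
                },
                // Only the entries that changed are sent.
                "conditions": []map[string]any{
                    {"type": "Ready", "status": "False", "reason": "ContainersNotReady"},
                },
            },
        }
        b, _ := json.Marshal(patch)
        fmt.Println(string(b))
    }
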
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.026919 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.026952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.026965 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.026981 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.026992 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.041886 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.054863 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
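
[annotation] Independently of the webhook failure, the node keeps flapping to NotReady because the container runtime reports NetworkReady=false: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/. A simplified sketch of that check, assuming the libcni convention of treating .conf, .conflist, and .json files as network configurations (the real runtime also validates file contents; this only mirrors the "no CNI configuration file" case):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // hasCNIConfig reports whether the conf dir contains anything libcni
    // would consider a network configuration file.
    func hasCNIConfig(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
        if err != nil || !ok {
            fmt.Println("NetworkReady=false: no CNI configuration file; has your network provider started?")
            return
        }
        fmt.Println("NetworkReady=true")
    }
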
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.072396 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.087919 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.129725 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.129786 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.129804 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.129829 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.129848 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.175426 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.192626 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.214232 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.233102 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.233164 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.233185 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.233209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.233227 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.235980 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.256303 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
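
[annotation] In the setters.go entries the Ready condition's lastHeartbeatTime and lastTransitionTime advance together because the status is actually flipping; on syncs where nothing changes, only the heartbeat would move. A sketch of that update rule; the helper below is a simplification for illustration, not the kubelet's actual setter:

    package main

    import (
        "fmt"
        "time"
    )

    type NodeCondition struct {
        Type, Status, Reason, Message         string
        LastHeartbeatTime, LastTransitionTime time.Time
    }

    // updateCondition advances the heartbeat on every sync, but moves the
    // transition time only when the condition's status changes.
    func updateCondition(cond *NodeCondition, status, reason, message string, now time.Time) {
        cond.LastHeartbeatTime = now
        if cond.Status != status {
            cond.LastTransitionTime = now
            cond.Status = status
        }
        cond.Reason, cond.Message = reason, message
    }

    func main() {
        now := time.Date(2025, 10, 1, 5, 30, 2, 0, time.UTC)
        ready := &NodeCondition{Type: "Ready", Status: "True"}
        updateCondition(ready, "False", "KubeletNotReady",
            "container runtime network not ready: NetworkReady=false", now)
        fmt.Printf("%+v\n", *ready)
    }
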
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.275747 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.294899 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.314974 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.334111 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.337269 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.337335 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.337361 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.337389 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.337410 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.352598 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.376903 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.390482 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.420354 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.439964 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 
05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.442029 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.442082 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.442097 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.442117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.442131 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.471830 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.491800 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.510725 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.525802 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:02Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.545444 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.545502 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.545518 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.545542 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.545559 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.648793 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.648843 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.648853 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.648868 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.648880 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.751213 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.751307 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.751328 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.751353 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.751374 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.756564 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:02 crc kubenswrapper[4661]: E1001 05:30:02.756797 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.854298 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.854361 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.854382 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.854441 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.854459 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.957464 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.957533 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.957556 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.957584 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.957606 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:02Z","lastTransitionTime":"2025-10-01T05:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.993949 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:30:02 crc kubenswrapper[4661]: I1001 05:30:02.994092 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:02 crc kubenswrapper[4661]: E1001 05:30:02.994191 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:30:34.994153535 +0000 UTC m=+83.932132209 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:30:02 crc kubenswrapper[4661]: E1001 05:30:02.994243 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:30:02 crc kubenswrapper[4661]: E1001 05:30:02.994336 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:34.994309379 +0000 UTC m=+83.932288033 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.060937 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.060983 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.060999 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.061021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.061038 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.095336 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.095414 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.095482 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095695 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095770 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095805 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095864 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095915 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095941 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095952 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.095884 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:35.095857487 +0000 UTC m=+84.033836141 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.096039 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:35.096013821 +0000 UTC m=+84.033992465 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.096071 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:35.096055492 +0000 UTC m=+84.034034146 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.164127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.164183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.164200 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.164223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.164240 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.182801 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/2.log" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.184166 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/1.log" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.188400 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb" exitCode=1 Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.188462 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.188510 4661 scope.go:117] "RemoveContainer" containerID="66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.189757 4661 scope.go:117] "RemoveContainer" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.190088 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.209575 4661 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.229898 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.249516 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.267085 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.267140 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.267158 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.267187 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.267206 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.270509 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.290158 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.313507 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.335308 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.351875 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.372676 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.373103 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.373283 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.373456 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.373691 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.380700 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.399561 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.431695 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.444349 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.461409 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.476431 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.476498 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.476510 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.476550 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.476563 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.479298 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.499675 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.517699 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.543766 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:03Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.580654 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.580696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.580709 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.580754 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.580765 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.683421 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.683477 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.683494 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.683519 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.683537 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.756771 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.756835 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.756975 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.757118 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.757262 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.757383 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.787405 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.787469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.787492 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.787523 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.787548 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.890363 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.890426 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.890436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.890450 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.890460 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.905108 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.905252 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: E1001 05:30:03.905311 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:19.905295066 +0000 UTC m=+68.843273680 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.993143 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.993206 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.993223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.993247 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:03 crc kubenswrapper[4661]: I1001 05:30:03.993263 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:03Z","lastTransitionTime":"2025-10-01T05:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.097469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.097561 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.097578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.097616 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.097653 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.163371 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.179764 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.183938 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.195503 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/2.log" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.198359 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.199680 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.199853 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.199873 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.199897 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.199915 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.200427 4661 scope.go:117] "RemoveContainer" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb" Oct 01 05:30:04 crc kubenswrapper[4661]: E1001 05:30:04.200701 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.216437 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1
dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7
ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.231353 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.252699 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883
af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66ccab068a4ef62193c323a3ff6198f0539e4e9074f0dbd548b5aceb2a7a32bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318359 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 05:29:48.318392 6090 factory.go:656] Stopping watch factory\\\\nI1001 05:29:48.318408 6090 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:29:48.317892 6090 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-f8vtz after 0 failed attempt(s)\\\\nI1001 05:29:48.318415 6090 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 05:29:48.318443 6090 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:29:48.318452 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 05:29:48.318663 6090 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1001 05:29:48.318735 6090 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 
05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.1
26.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.270095 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 
05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.291897 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.303213 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.303283 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.303308 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.303338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.303364 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.313844 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.328086 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.346283 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.359882 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.381903 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.399987 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.406049 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.406111 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.406132 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.406154 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.406171 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.419859 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.436501 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.455469 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.476180 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.494069 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.509209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.509273 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.509290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.509313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.509328 4661 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.513749 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.529888 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.544343 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.563370 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.582231 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.597532 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.612398 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.612432 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.612440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.612469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.612479 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.614056 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b1
62f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mount
Path\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e
8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.627197 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8b
a96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.654851 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883
af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.673081 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.692755 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.709885 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.714217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.714276 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.714295 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.714319 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.714337 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.727485 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.743207 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.756289 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:04 crc kubenswrapper[4661]: E1001 05:30:04.756441 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.771662 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e
6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.785016 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.803147 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:04Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.817591 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.817702 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.817726 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.817756 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.817780 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.921141 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.921197 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.921217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.921240 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:04 crc kubenswrapper[4661]: I1001 05:30:04.921257 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:04Z","lastTransitionTime":"2025-10-01T05:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.024124 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.024185 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.024204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.024231 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.024254 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.127176 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.127250 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.127272 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.127304 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.127326 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.230146 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.230183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.230194 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.230209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.230221 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.333359 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.333429 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.333450 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.333475 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.333492 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.436974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.437033 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.437050 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.437072 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.437089 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.539895 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.539966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.539984 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.540007 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.540023 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.642890 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.642932 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.642944 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.642960 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.642972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.746617 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.746723 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.746743 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.746767 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.746785 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.756075 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.756880 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.756584 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:05 crc kubenswrapper[4661]: E1001 05:30:05.757087 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:05 crc kubenswrapper[4661]: E1001 05:30:05.757537 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:05 crc kubenswrapper[4661]: E1001 05:30:05.763180 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.849487 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.849552 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.849569 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.849594 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.849612 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.952797 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.952854 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.952876 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.952903 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:05 crc kubenswrapper[4661]: I1001 05:30:05.952929 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:05Z","lastTransitionTime":"2025-10-01T05:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.055159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.055220 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.055242 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.055266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.055283 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.158256 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.158343 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.158361 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.158384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.158401 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.261502 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.261560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.261578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.261601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.261620 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.365145 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.365205 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.365214 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.365228 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.365238 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.468481 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.468549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.468574 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.468605 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.468623 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.571184 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.571290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.571311 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.571333 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.571349 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.674410 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.674477 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.674500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.674530 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.674551 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.756546 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:06 crc kubenswrapper[4661]: E1001 05:30:06.756745 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.777866 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.777935 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.777954 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.777980 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.777997 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.880735 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.880797 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.880820 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.880850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.880875 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.983524 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.983597 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.983617 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.983694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:06 crc kubenswrapper[4661]: I1001 05:30:06.983714 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:06Z","lastTransitionTime":"2025-10-01T05:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.086393 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.086439 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.086451 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.086494 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.086532 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.189672 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.189715 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.189729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.189750 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.189768 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.293117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.293180 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.293199 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.293224 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.293240 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.396303 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.396365 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.396387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.396415 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.396439 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.499742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.499794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.499812 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.499834 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.499851 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.603034 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.603093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.603108 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.603129 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.603145 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.706342 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.706415 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.706433 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.706462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.706480 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.755934 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.756031 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:07 crc kubenswrapper[4661]: E1001 05:30:07.756109 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.756137 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:07 crc kubenswrapper[4661]: E1001 05:30:07.756197 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:07 crc kubenswrapper[4661]: E1001 05:30:07.756332 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.808553 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.808611 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.808657 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.808684 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.808703 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.911463 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.911593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.911620 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.911690 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:07 crc kubenswrapper[4661]: I1001 05:30:07.911716 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:07Z","lastTransitionTime":"2025-10-01T05:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.014597 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.014721 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.014795 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.014838 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.014857 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.117956 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.118024 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.118041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.118065 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.118083 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.221258 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.221364 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.221384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.221409 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.221428 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.324258 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.324354 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.324373 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.324434 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.324452 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.428500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.428560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.428578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.428602 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.428620 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.531600 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.531700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.531718 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.531741 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.531758 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.634060 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.634099 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.634111 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.634127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.634138 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.693205 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.693263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.693284 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.693308 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.693328 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: E1001 05:30:08.716488 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:08Z is after 2025-08-24T17:21:41Z"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.723259 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.723364 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.723384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.723450 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.723469 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.750625 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.750708 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.750725 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.750749 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.750769 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.756801 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:08 crc kubenswrapper[4661]: E1001 05:30:08.756947 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.774713 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.774768 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.774786 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.774807 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.774824 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.798712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.798749 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.798760 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.798779 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.798796 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:08 crc kubenswrapper[4661]: E1001 05:30:08.814590 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:08Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:08 crc kubenswrapper[4661]: E1001 05:30:08.814751 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.816143 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.816177 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.816188 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.816204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.816217 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.918581 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.918657 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.918678 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.918703 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:08 crc kubenswrapper[4661]: I1001 05:30:08.918720 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:08Z","lastTransitionTime":"2025-10-01T05:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.022449 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.022882 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.022900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.022925 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.022945 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.125544 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.125603 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.125621 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.125684 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.125702 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.228797 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.228853 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.228870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.228896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.228916 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.331608 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.331695 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.331713 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.331735 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.331753 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.434701 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.435096 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.435284 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.435471 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.435671 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.538071 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.538116 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.538132 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.538153 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.538169 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.641029 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.641094 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.641117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.641145 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.641167 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.744572 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.744670 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.744700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.744734 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.744759 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.756874 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.756949 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.756988 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:09 crc kubenswrapper[4661]: E1001 05:30:09.757040 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:09 crc kubenswrapper[4661]: E1001 05:30:09.757151 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:09 crc kubenswrapper[4661]: E1001 05:30:09.757384 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.848192 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.848271 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.848298 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.848332 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.848357 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.951368 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.951447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.951465 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.951489 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:09 crc kubenswrapper[4661]: I1001 05:30:09.951507 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:09Z","lastTransitionTime":"2025-10-01T05:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.055718 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.055788 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.055811 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.055841 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.055862 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.159327 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.159396 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.159442 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.159469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.159487 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.263063 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.263135 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.263159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.263190 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.263214 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.366165 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.366259 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.366284 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.366316 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.366336 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.469694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.469761 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.469785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.469815 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.469838 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.573276 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.573351 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.573375 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.573405 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.573428 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.676322 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.676372 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.676406 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.676431 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.676444 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.755892 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:10 crc kubenswrapper[4661]: E1001 05:30:10.756096 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.779529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.779601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.779623 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.779687 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.779710 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.882433 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.882471 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.882483 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.882521 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.882532 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.985681 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.985724 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.985735 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.985751 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:10 crc kubenswrapper[4661]: I1001 05:30:10.985764 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:10Z","lastTransitionTime":"2025-10-01T05:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.088593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.088651 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.088662 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.088677 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.088688 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.191791 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.191850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.191869 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.191893 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.191911 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.294840 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.294912 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.294926 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.294942 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.294953 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.397387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.397456 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.397468 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.397485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.397525 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.500324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.500387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.500405 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.500427 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.500445 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.603225 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.603286 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.603305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.603327 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.603344 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.706338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.706408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.706425 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.706448 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.706466 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.755992 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.755991 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.756657 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:11 crc kubenswrapper[4661]: E1001 05:30:11.756747 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:11 crc kubenswrapper[4661]: E1001 05:30:11.757059 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:11 crc kubenswrapper[4661]: E1001 05:30:11.756960 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.780183 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.803126 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.813703 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.814794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.815038 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.815270 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.815498 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.825934 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.843562 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.861472 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.876546 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.895557 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.911694 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.917895 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.917936 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.917950 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.917971 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.917986 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:11Z","lastTransitionTime":"2025-10-01T05:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.935817 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:11 crc kubenswrapper[4661]: I1001 05:30:11.950516 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.017589 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s 
restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:11Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.020104 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.020153 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.020168 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.020189 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.020204 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.036039 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.060739 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.073273 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.087429 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.100453 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.118602 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.122246 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.122283 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.122294 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.122311 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.122323 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.128423 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:12Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.227403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.227454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.227466 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.227483 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.227494 4661 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.330053 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.330105 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.330118 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.330135 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.330175 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.433120 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.433182 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.433200 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.433234 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.433256 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.536384 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.536508 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.536529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.536554 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.536569 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.639864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.639932 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.639943 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.639960 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.639972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.742894 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.742944 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.742956 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.742973 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.742987 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.756314 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:12 crc kubenswrapper[4661]: E1001 05:30:12.756499 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.846886 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.846956 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.846976 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.847003 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.847022 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.949790 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.949841 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.949854 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.949871 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:12 crc kubenswrapper[4661]: I1001 05:30:12.949886 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:12Z","lastTransitionTime":"2025-10-01T05:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.052982 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.053214 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.053287 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.053402 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.053494 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.156079 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.156529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.156597 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.156688 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.156761 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.260073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.260133 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.260149 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.260172 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.260188 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.364186 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.364227 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.364243 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.364263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.364279 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.467467 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.467523 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.467540 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.467563 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.467580 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.570482 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.570551 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.570568 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.570594 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.570612 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.672897 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.673210 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.673282 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.673363 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.673442 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.756046 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.756162 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:13 crc kubenswrapper[4661]: E1001 05:30:13.756236 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:13 crc kubenswrapper[4661]: E1001 05:30:13.756329 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.756044 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:13 crc kubenswrapper[4661]: E1001 05:30:13.756460 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.775781 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.775851 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.775872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.775909 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.775932 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.878045 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.878082 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.878094 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.878142 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.878155 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.983937 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.984663 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.984780 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.984884 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:13 crc kubenswrapper[4661]: I1001 05:30:13.985007 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:13Z","lastTransitionTime":"2025-10-01T05:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.088757 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.088858 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.088877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.088903 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.088920 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.192874 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.192923 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.192933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.192952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.192963 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.296430 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.296778 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.296865 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.296966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.297045 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.400507 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.400579 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.400594 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.400622 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.400666 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.503903 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.503964 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.503976 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.504005 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.504022 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.606848 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.606907 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.606927 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.606952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.606968 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.710073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.710171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.710196 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.710226 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.710248 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.755907 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:14 crc kubenswrapper[4661]: E1001 05:30:14.756103 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.813348 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.813705 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.813842 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.813974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.814058 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.918037 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.918107 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.918125 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.918148 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:14 crc kubenswrapper[4661]: I1001 05:30:14.918165 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:14Z","lastTransitionTime":"2025-10-01T05:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 01 05:30:15 crc kubenswrapper[4661]: I1001 05:30:15.756139 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:15 crc kubenswrapper[4661]: I1001 05:30:15.756234 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:15 crc kubenswrapper[4661]: E1001 05:30:15.756397 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:15 crc kubenswrapper[4661]: E1001 05:30:15.756492 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:15 crc kubenswrapper[4661]: I1001 05:30:15.756723 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:15 crc kubenswrapper[4661]: E1001 05:30:15.756850 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:16 crc kubenswrapper[4661]: I1001 05:30:16.756024 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:16 crc kubenswrapper[4661]: E1001 05:30:16.756204 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:17 crc kubenswrapper[4661]: I1001 05:30:17.755958 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:17 crc kubenswrapper[4661]: I1001 05:30:17.755975 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:17 crc kubenswrapper[4661]: I1001 05:30:17.756060 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:17 crc kubenswrapper[4661]: E1001 05:30:17.756119 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:17 crc kubenswrapper[4661]: E1001 05:30:17.756416 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:17 crc kubenswrapper[4661]: E1001 05:30:17.756484 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:17 crc kubenswrapper[4661]: I1001 05:30:17.756866 4661 scope.go:117] "RemoveContainer" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb"
Oct 01 05:30:17 crc kubenswrapper[4661]: E1001 05:30:17.757125 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.756824 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:18 crc kubenswrapper[4661]: E1001 05:30:18.756936 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.998590 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.998674 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.998694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.998717 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:18 crc kubenswrapper[4661]: I1001 05:30:18.998733 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:18Z","lastTransitionTime":"2025-10-01T05:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.017646 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:19Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.020928 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.020996 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.021020 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.021049 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.021069 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.038490 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:19Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.042420 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.042463 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.042473 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.042492 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.042504 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.060328 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:19Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.064218 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.064246 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.064257 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.064274 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.064286 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.078359 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:19Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.082287 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.082326 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.082336 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.082351 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.082360 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.094818 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:19Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.095067 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.096536 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.096618 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.096700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.096747 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.096765 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.199929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.199973 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.199985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.200009 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.200023 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.302858 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.302928 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.302941 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.302959 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.302971 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.406005 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.406055 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.406068 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.406086 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.406100 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.508845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.508914 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.508933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.508958 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.508978 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.612468 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.612520 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.612532 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.612552 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.612919 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.719821 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.719898 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.719919 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.719954 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.719972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.756317 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.756361 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.756361 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.756545 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.756725 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.756833 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.823507 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.823572 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.823586 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.823613 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.823653 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.926260 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.926305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.926318 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.926337 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.926351 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:19Z","lastTransitionTime":"2025-10-01T05:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:19 crc kubenswrapper[4661]: I1001 05:30:19.976459 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.976607 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:19 crc kubenswrapper[4661]: E1001 05:30:19.976695 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:30:51.97667511 +0000 UTC m=+100.914653724 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.028895 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.028939 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.028952 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.028970 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.028993 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.131727 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.131782 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.131793 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.131810 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.131825 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.234616 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.234680 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.234688 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.234703 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.234713 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.336654 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.336704 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.336719 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.336736 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.336747 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.439075 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.439115 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.439125 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.439140 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.439151 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.541522 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.541560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.541568 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.541582 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.541591 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.643974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.644026 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.644041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.644059 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.644073 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.746150 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.746198 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.746208 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.746222 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.746234 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.756416 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:20 crc kubenswrapper[4661]: E1001 05:30:20.756568 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.848976 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.849041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.849058 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.849084 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.849102 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.951442 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.951509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.951566 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.951591 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:20 crc kubenswrapper[4661]: I1001 05:30:20.951610 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:20Z","lastTransitionTime":"2025-10-01T05:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.054014 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.054041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.054052 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.054068 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.054078 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.156479 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.156516 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.156530 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.156549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.156563 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.262403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.262434 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.262445 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.262476 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.262487 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.364696 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.364784 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.364816 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.364846 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.364868 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.466770 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.466838 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.466861 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.466888 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.466910 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.569366 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.569428 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.569445 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.569471 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.569489 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.672586 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.672677 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.672700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.672725 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.672744 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.756473 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.756476 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.756472 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:21 crc kubenswrapper[4661]: E1001 05:30:21.756585 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:21 crc kubenswrapper[4661]: E1001 05:30:21.756723 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:21 crc kubenswrapper[4661]: E1001 05:30:21.756781 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.774916 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.775021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.775040 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.775063 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.775080 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.804259 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883
af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.820061 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.834603 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.849469 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.870811 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.878619 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.878720 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc 
kubenswrapper[4661]: I1001 05:30:21.878737 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.878759 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.878777 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.886271 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.909783 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMou
nts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"fini
shedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.924703 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.939474 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.952189 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.963833 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.972319 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.982080 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.982117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.982133 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.982153 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.982168 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:21Z","lastTransitionTime":"2025-10-01T05:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.985649 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:21 crc kubenswrapper[4661]: I1001 05:30:21.999342 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:21Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.016170 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:22Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.027463 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:22Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.042810 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:22Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.062107 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:22Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.084681 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.085058 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.085250 4661 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.085340 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.085431 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.188073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.188126 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.188144 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.188172 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.188189 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.301408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.301443 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.301456 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.301470 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.301480 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.404167 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.404206 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.404219 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.404235 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.404246 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.506163 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.506197 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.506208 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.506222 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.506233 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.608164 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.608221 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.608238 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.608261 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.608278 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.713313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.713363 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.713377 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.713394 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.713407 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.755907 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:22 crc kubenswrapper[4661]: E1001 05:30:22.756093 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.815727 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.815788 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.815800 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.815821 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.815832 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.918905 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.918999 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.919018 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.919041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:22 crc kubenswrapper[4661]: I1001 05:30:22.919058 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:22Z","lastTransitionTime":"2025-10-01T05:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.022266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.022325 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.022343 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.022366 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.022409 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.125551 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.125742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.125763 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.125787 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.125804 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.228171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.228221 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.228241 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.228267 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.228288 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.330983 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.331028 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.331041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.331060 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.331073 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.434014 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.434073 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.434090 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.434111 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.434128 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.536560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.536681 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.536702 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.536725 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.536746 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.638673 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.638736 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.638755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.638779 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.638796 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.741707 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.741743 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.741753 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.741767 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.741777 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.755928 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.755978 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:23 crc kubenswrapper[4661]: E1001 05:30:23.756024 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.756076 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:23 crc kubenswrapper[4661]: E1001 05:30:23.756149 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:23 crc kubenswrapper[4661]: E1001 05:30:23.756264 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.843966 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.844334 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.844479 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.844614 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.844804 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.947344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.947410 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.947432 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.947462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:23 crc kubenswrapper[4661]: I1001 05:30:23.947481 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:23Z","lastTransitionTime":"2025-10-01T05:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.049484 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.049550 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.049568 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.049595 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.049611 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.151336 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.151380 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.151391 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.151409 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.151420 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.254452 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.254485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.254494 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.254509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.254518 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.261870 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/0.log" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.261941 4661 generic.go:334] "Generic (PLEG): container finished" podID="dc3b0e2f-f27e-4420-9323-ec45878c11a6" containerID="caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f" exitCode=1 Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.261988 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerDied","Data":"caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.262501 4661 scope.go:117] "RemoveContainer" containerID="caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.298115 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.317712 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.329459 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.346114 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.357506 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.357552 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.357572 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.357595 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.357612 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.363733 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.375766 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.388574 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.400664 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.426431 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.438867 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.452825 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.460965 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.460997 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.461008 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.461023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.461033 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.464663 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.477443 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.491152 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.518722 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.533351 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.546021 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.558615 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:24Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.563376 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.563397 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.563408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.563423 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.563433 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.667687 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.667745 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.667761 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.667784 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.667802 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.756911 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:24 crc kubenswrapper[4661]: E1001 05:30:24.757284 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.771083 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.771138 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.771155 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.771180 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.771199 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.874542 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.874615 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.874659 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.874708 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.874726 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.977080 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.977154 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.977177 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.977206 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:24 crc kubenswrapper[4661]: I1001 05:30:24.977228 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:24Z","lastTransitionTime":"2025-10-01T05:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.079810 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.079849 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.079861 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.079876 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.079888 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.182654 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.182706 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.182719 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.182737 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.182748 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.267048 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/0.log" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.267111 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerStarted","Data":"9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.281398 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.285153 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.285194 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.285203 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.285217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.285228 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.295036 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.306867 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.318984 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.332313 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.344202 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.357054 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.372094 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.387738 4661 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.387774 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.387785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.387802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.387814 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.391109 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883
af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.404488 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.416214 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.433873 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.452739 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.469113 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.489689 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.490376 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.490436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.490450 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.490466 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.490477 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.505936 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.519020 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.534312 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:25Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.594129 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.594171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.594181 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.594196 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.594207 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
[... repeated "Recording event message for node" / "Node became not ready" heartbeat blocks elided ...]
Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.756818 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.756841 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:25 crc kubenswrapper[4661]: E1001 05:30:25.756978 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.757008 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:25 crc kubenswrapper[4661]: E1001 05:30:25.757117 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:25 crc kubenswrapper[4661]: E1001 05:30:25.757231 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.798934 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.798981 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.798998 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.799022 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.799038 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.901385 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.901443 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.901462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.901482 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:25 crc kubenswrapper[4661]: I1001 05:30:25.901498 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:25Z","lastTransitionTime":"2025-10-01T05:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.003542 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.003860 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.003898 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.003929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.003959 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.106139 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.106186 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.106197 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.106213 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.106223 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.209486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.209564 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.209583 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.209605 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.209622 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.311410 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.311461 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.311477 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.311500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.311517 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.414045 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.414093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.414104 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.414119 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.414130 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.516734 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.516820 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.516844 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.516879 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.516905 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.619872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.619947 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.619965 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.619988 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.620006 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.722305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.722356 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.722373 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.722394 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.722411 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.756610 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:26 crc kubenswrapper[4661]: E1001 05:30:26.756793 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.773078 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.825593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.825686 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.825704 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.825726 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.825819 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.929516 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.929599 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.929670 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.929694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:26 crc kubenswrapper[4661]: I1001 05:30:26.929712 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:26Z","lastTransitionTime":"2025-10-01T05:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.033271 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.033344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.033367 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.033397 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.033419 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.136296 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.136369 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.136393 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.136422 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.136447 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.239599 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.239716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.239742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.239769 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.239787 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.342143 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.342178 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.342187 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.342201 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.342213 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.445319 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.445376 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.445393 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.445418 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.445438 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.548199 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.548273 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.548301 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.548330 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.548350 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.651593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.651657 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.651674 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.651692 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.651704 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.754893 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.754925 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.754938 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.754953 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.754964 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:27Z","lastTransitionTime":"2025-10-01T05:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.756375 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.756429 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:27 crc kubenswrapper[4661]: E1001 05:30:27.756471 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:27 crc kubenswrapper[4661]: I1001 05:30:27.756574 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:27 crc kubenswrapper[4661]: E1001 05:30:27.756788 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:27 crc kubenswrapper[4661]: E1001 05:30:27.756901 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
[... repeated node-status heartbeat blocks elided ...]
Oct 01 05:30:28 crc kubenswrapper[4661]: I1001 05:30:28.756480 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:28 crc kubenswrapper[4661]: E1001 05:30:28.756685 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... repeated node-status heartbeat blocks elided ...]
Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.326368 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.326485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.326512 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.326615 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.326717 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.349165 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:29Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.354858 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.354912 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.354929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.354951 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.354968 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.376959 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:29Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.384842 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.385088 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.385107 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.385133 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.385153 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.416584 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.422880 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.422978 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.422997 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.423071 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.423092 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.444028 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.448940 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.448985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.448996 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.449012 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.449026 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.467523 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.467699 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.470193 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.470223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.470234 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.470251 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.470264 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.573345 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.573403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.573420 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.573443 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.573463 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.676135 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.676209 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.676222 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.676246 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.676262 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.757006 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.757006 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.757217 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.757035 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.757395 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:29 crc kubenswrapper[4661]: E1001 05:30:29.757570 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
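The NotReady loop above reduces to a single failing probe: the container runtime reports NetworkReady=false because nothing under /etc/kubernetes/cni/net.d/ looks like a CNI network configuration. A minimal Go sketch of that kind of directory probe follows; the path is taken from the messages above, while hasCNIConfig and the extension list are illustrative assumptions, not the actual kubelet/CRI-O implementation.

// cnicheck.go - illustrative probe for a CNI config directory.
// Assumption: a .conf/.conflist/.json file is what "a CNI configuration
// file" means here; the real readiness check lives in the container runtime.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig (hypothetical helper) reports whether dir contains at
// least one CNI network configuration file.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// Mirrors the condition the kubelet keeps logging above.
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the network provider writes a config (e.g. a 10-ovn-kubernetes.conf, as referenced later in this log) into that directory, a probe like this flips to ready and the NodeNotReady events stop.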
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.778911 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.778969 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.778983 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.779012 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.779029 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.882814 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.882888 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.882907 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.882930 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.882945 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.987585 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.987713 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.987785 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.987851 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:29 crc kubenswrapper[4661]: I1001 05:30:29.987872 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:29Z","lastTransitionTime":"2025-10-01T05:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.091598 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.091701 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.091721 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.091748 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.091765 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.260132 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.260169 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.260178 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.260192 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.260202 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.363218 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.363273 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.363290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.363313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.363331 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.469672 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.469712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.469722 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.469748 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.469759 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.572466 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.572531 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.572548 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.572570 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.572586 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
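Every "Node became not ready" record above serializes the same Ready condition with a fresh heartbeat timestamp. The sketch below reconstructs where the condition={...} text comes from; the struct mirrors the field names in the log and is a stand-in, not the real k8s.io/api/core/v1.NodeCondition type.

// nodecondition.go - reproduces the condition={...} JSON shape logged by
// setters.go:603. Field names are copied from the log lines above; the
// struct itself is illustrative.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Printf("Node became not ready: condition=%s\n", b)
}

Only the two timestamps change between repetitions, which is why the log shows the identical payload every hundred milliseconds or so.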
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.675861 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.675925 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.675943 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.675967 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.675984 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.756799 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:30 crc kubenswrapper[4661]: E1001 05:30:30.757271 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.779171 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.779249 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.779268 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.779290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.779307 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.882946 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.883007 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.883031 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.883059 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.883082 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.986101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.986169 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.986193 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.986216 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:30 crc kubenswrapper[4661]: I1001 05:30:30.986233 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:30Z","lastTransitionTime":"2025-10-01T05:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.089030 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.089085 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.089103 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.089129 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.089146 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.192353 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.192405 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.192421 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.192442 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.192461 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.295159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.295223 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.295239 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.295263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.295281 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.397531 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.397569 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.397578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.397617 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.397646 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.500370 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.500439 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.500461 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.500490 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.500513 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.603731 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.603794 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.603816 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.603845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.603866 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.706554 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.706601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.706612 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.706648 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.706661 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
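Below, the log turns to the second failure mode: every node and pod status patch is rejected because the node-identity webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z while the node clock reads 2025-10-01. The following is a minimal sketch of the validity-window check behind "certificate has expired or is not yet valid"; the PEM path is an assumption borrowed from the webhook pod's /etc/webhook-cert/ mount shown further down, and the tls.crt file name is hypothetical.

// certcheck.go - illustrative x509 validity-window check matching the
// TLS verification error in the log. Path and file name are assumed.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt") // assumed location
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	// The same window check the TLS handshake performs: now must fall
	// between NotBefore and NotAfter or verification fails.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Println("certificate valid until", cert.NotAfter.UTC().Format(time.RFC3339))
}

Rotating the webhook's serving certificate (or correcting the node clock) is what clears the "failed calling webhook" patch failures that follow.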
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.756038 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.756083 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.756174 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:31 crc kubenswrapper[4661]: E1001 05:30:31.756272 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:31 crc kubenswrapper[4661]: E1001 05:30:31.756431 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:31 crc kubenswrapper[4661]: E1001 05:30:31.756707 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.777296 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.797780 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.809137 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.809183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.809199 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.809221 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.809238 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.815721 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.847026 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.862833 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.882938 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.899673 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.911900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.911964 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.911985 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.912010 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.912028 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:31Z","lastTransitionTime":"2025-10-01T05:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.922372 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.940871 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.973276 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s 
restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:31 crc kubenswrapper[4661]: I1001 05:30:31.992101 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:31Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.007394 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.014900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.014933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.014944 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.014960 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.014972 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.028804 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.064398 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.080895 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.112434 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be
3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.116906 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.116948 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.116958 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.116974 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.116986 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.122955 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.135178 4661 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.150412 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:32Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.220009 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.220056 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.220068 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.220085 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.220097 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.322969 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.323021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.323038 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.323064 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.323083 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.426520 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.426606 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.426618 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.426645 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.426658 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.529246 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.529304 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.529321 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.529344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.529361 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.632243 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.632314 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.632334 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.632362 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.632381 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.739815 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.739877 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.739895 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.739919 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.739939 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.756169 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:32 crc kubenswrapper[4661]: E1001 05:30:32.756342 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.757449 4661 scope.go:117] "RemoveContainer" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.843043 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.843341 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.843352 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.843369 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.843381 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.947142 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.947182 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.947191 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.947204 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:32 crc kubenswrapper[4661]: I1001 05:30:32.947213 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:32Z","lastTransitionTime":"2025-10-01T05:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.050437 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.050563 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.050851 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.051066 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.051177 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.154538 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.154601 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.154624 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.154690 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.154716 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.257012 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.257059 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.257074 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.257096 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.257114 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.296053 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/2.log" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.300301 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.300959 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.322415 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.338823 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.360743 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.360803 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.360822 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.360843 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.360861 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.366085 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.390048 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.421209 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.434653 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 
05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.456091 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.463377 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.463428 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.463445 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.463469 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.463487 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.474180 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.486147 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.496775 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.511176 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.523071 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.533101 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.546001 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.559033 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.565564 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.565598 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.565656 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.565673 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.565682 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.570874 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.582697 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.596993 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.614297 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:33Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.668453 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.668497 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.668531 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.668549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.668562 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.756248 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.756378 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:33 crc kubenswrapper[4661]: E1001 05:30:33.756431 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.756511 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:33 crc kubenswrapper[4661]: E1001 05:30:33.756585 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:33 crc kubenswrapper[4661]: E1001 05:30:33.756804 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.771299 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.771403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.771430 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.771460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.771485 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.873926 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.874001 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.874023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.874054 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.874076 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.977486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.977605 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.977666 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.977700 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:33 crc kubenswrapper[4661]: I1001 05:30:33.977722 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:33Z","lastTransitionTime":"2025-10-01T05:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.080165 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.080225 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.080242 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.080266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.080282 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.183229 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.183284 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.183305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.183344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.183378 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.286810 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.286891 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.286915 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.286946 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.286969 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.306076 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/3.log" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.306990 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/2.log" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.310363 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" exitCode=1 Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.310427 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.310488 4661 scope.go:117] "RemoveContainer" containerID="3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.311377 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:30:34 crc kubenswrapper[4661]: E1001 05:30:34.311628 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.328410 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.346372 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.360735 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.389371 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.389416 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.389433 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.389455 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.389470 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.390351 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.402624 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.413521 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.425435 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.439349 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.456677 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.478105 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.497867 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.497925 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.497944 4661 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.497968 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.497985 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.517164 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.544377 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.561769 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.574895 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.597872 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.601168 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.601217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc 
kubenswrapper[4661]: I1001 05:30:34.601236 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.601260 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.601289 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.615585 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.654412 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d07001
00896ebebc217c36257198e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f0a6228352dce40e6ff0a6f7fa919f57e025883af1ec98bbd4d7437813ea8fb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:02Z\\\",\\\"message\\\":\\\"3386 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:02.013452 6253 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013480 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 05:30:02.013552 6253 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013610 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.013922 6253 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:02.015010 6253 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 05:30:02.015030 6253 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 05:30:02.015043 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1001 05:30:02.015067 6253 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:02.015096 6253 factory.go:656] Stopping watch factory\\\\nI1001 05:30:02.015109 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1001 05:30:02.015107 6253 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:34Z\\\",\\\"message\\\":\\\"er.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:30:33.937914 6657 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:33.937969 6657 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938006 6657 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938159 6657 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938289 6657 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938383 6657 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.939909 6657 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 05:30:33.939973 6657 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 05:30:33.940016 6657 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:33.940019 6657 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 05:30:33.940045 6657 handler.go:208] Removed *v1.Pod event handler 
6\\\\nI1001 05:30:33.940067 6657 factory.go:656] Stopping watch factory\\\\nI1001 05:30:33.940084 6657 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:30:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c2470
4e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.670998 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 
05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.688327 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:34Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.704828 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.704890 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.704906 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.704929 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.704947 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.756477 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:34 crc kubenswrapper[4661]: E1001 05:30:34.756616 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.807870 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.807917 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.807928 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.807945 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.807956 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.911331 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.911397 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.911414 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.911441 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:34 crc kubenswrapper[4661]: I1001 05:30:34.911461 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:34Z","lastTransitionTime":"2025-10-01T05:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.014220 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.014265 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.014275 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.014291 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.014301 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.036122 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.036350 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.036319141 +0000 UTC m=+147.974297785 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.036444 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.036612 4661 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.036744 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.036719542 +0000 UTC m=+147.974698186 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.117454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.117514 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.117526 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.117541 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.117552 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.137063 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.137106 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.137147 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137247 4661 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137285 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137305 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.137288633 +0000 UTC m=+148.075267247 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137316 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137311 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137362 4661 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137388 4661 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137335 4661 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137464 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.137439969 +0000 UTC m=+148.075418613 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.137564 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.137546811 +0000 UTC m=+148.075525455 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.220034 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.220092 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.220109 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.220131 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.220151 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.316887 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/3.log" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322328 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322565 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322625 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322672 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322697 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.322717 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.322679 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.341563 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a938006
6b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.357446 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.376471 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.395876 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.414711 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to 
/host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.425032 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.425080 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.425091 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.425107 4661 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.425119 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.432399 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.451881 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.468144 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.482822 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.505251 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d07001
00896ebebc217c36257198e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:34Z\\\",\\\"message\\\":\\\"er.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:30:33.937914 6657 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:33.937969 6657 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938006 6657 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938159 6657 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938289 6657 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938383 6657 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.939909 6657 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 05:30:33.939973 6657 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 05:30:33.940016 6657 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:33.940019 6657 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 05:30:33.940045 6657 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 05:30:33.940067 6657 factory.go:656] Stopping watch factory\\\\nI1001 05:30:33.940084 6657 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:30:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.520291 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.528023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.528110 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.528127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.528150 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.528171 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.535767 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.551945 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.574475 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.589787 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.616804 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.630685 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.630723 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.630733 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.630747 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.630757 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.632006 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.648907 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.662820 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:35Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.732949 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.733005 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.733021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.733040 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.733054 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.756682 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.756737 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.756764 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.756846 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.756958 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:35 crc kubenswrapper[4661]: E1001 05:30:35.757123 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.836316 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.836420 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.836440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.836465 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.836485 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.939296 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.939401 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.939424 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.939447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:35 crc kubenswrapper[4661]: I1001 05:30:35.939465 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:35Z","lastTransitionTime":"2025-10-01T05:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.041928 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.041987 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.041999 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.042017 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.042029 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.145126 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.145327 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.145345 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.145367 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.145384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.247918 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.247976 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.247992 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.248015 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.248032 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.350768 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.350850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.350878 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.350910 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.350938 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.453857 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.453932 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.453951 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.453977 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.453996 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.557201 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.557263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.557275 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.557290 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.557302 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.659528 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.659603 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.659659 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.659695 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.659721 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.755947 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:36 crc kubenswrapper[4661]: E1001 05:30:36.756418 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.762488 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.762540 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.762556 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.762578 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.762596 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.865736 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.865842 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.865899 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.865924 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.865983 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.969507 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.969606 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.969628 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.969691 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:36 crc kubenswrapper[4661]: I1001 05:30:36.969709 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:36Z","lastTransitionTime":"2025-10-01T05:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.072577 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.072679 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.072704 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.072732 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.072755 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.175461 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.175521 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.175538 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.175562 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.175579 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.278710 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.278757 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.278769 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.278787 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.278799 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.381940 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.382046 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.382067 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.382141 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.382159 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.486067 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.486127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.486152 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.486183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.486206 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.589694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.589728 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.589741 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.589755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.589764 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.692330 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.692401 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.692421 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.692447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.692468 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.756439 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.756524 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.756498 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
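Every NetworkPluginNotReady entry above points at the same missing piece: no CNI configuration file under /etc/kubernetes/cni/net.d/. A small diagnostic sketch that checks that directory the way the message suggests; it is my own helper, not a kubelet or CRI-O API, and the accepted extensions (.conf, .conflist, .json) are an assumption:

package main

// Report whether any CNI network config exists in the directory the
// kubelet error message names; the path is taken from the log message.
import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		// Assumed config extensions; adjust for your CNI plugin.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config present:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
	}
}

Until a file appears there (normally written by the cluster's network plugin once it starts), the Ready condition in these entries keeps reporting KubeletNotReady.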
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:37 crc kubenswrapper[4661]: E1001 05:30:37.756949 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:37 crc kubenswrapper[4661]: E1001 05:30:37.757227 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.802918 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.802978 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.802994 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.803017 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.803034 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.906456 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.906537 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.906564 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.906593 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:37 crc kubenswrapper[4661]: I1001 05:30:37.906616 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:37Z","lastTransitionTime":"2025-10-01T05:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.010152 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.010212 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.010232 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.010256 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.010273 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.112993 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.113056 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.113072 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.113099 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.113122 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.215954 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.216011 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.216031 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.216054 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.216070 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.318994 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.319074 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.319100 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.319133 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.319157 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.421927 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.421989 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.422008 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.422031 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.422052 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.525531 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.525600 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.525621 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.525683 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.525703 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.627833 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.627906 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.627924 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.627949 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.627968 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.730626 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.730717 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.730741 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.730771 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.730791 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.756156 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
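The condition={...} payload in the setters.go:603 entries is plain JSON carrying the field names of a core/v1 NodeCondition. A short decoding sketch; the struct below is my own minimal stand-in, not the Kubernetes type, and the payload is copied verbatim from one of the 05:30:38 entries above:

package main

// Decode one "Node became not ready" condition payload from this log.
import (
	"encoding/json"
	"fmt"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("unmarshal:", err)
		return
	}
	// Prints: Ready=False since 2025-10-01T05:30:38Z (KubeletNotReady)
	fmt.Printf("%s=%s since %s (%s)\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
}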
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.834497 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.834685 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.834712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.834744 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.834767 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.937802 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.937856 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.937872 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.937896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:38 crc kubenswrapper[4661]: I1001 05:30:38.937914 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:38Z","lastTransitionTime":"2025-10-01T05:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.040768 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.040864 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.040882 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.040906 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.040924 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.144386 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.144440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.144459 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.144485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.144503 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.247697 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.247784 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.247803 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.247828 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.247846 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.351370 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.351452 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.351473 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.351498 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.351516 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.455142 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.455614 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.455852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.456004 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.456128 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.559396 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.559462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.559479 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.559505 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.559522 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.639323 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.639388 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.639408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.639435 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.639453 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.661391 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:39Z is after 
2025-08-24T17:21:41Z"
Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.666838 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.666879 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.666889 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.666904 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.666917 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
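The status patch above is rejected because TLS verification of the node.network-node-identity.openshift.io webhook fails on an expired serving certificate: current time 2025-10-01T05:30:39Z is after the certificate's NotAfter of 2025-08-24T17:21:41Z. A hedged sketch of the same validity check against a PEM file on disk; the path webhook-cert.pem is a placeholder of mine, not a path from the log:

package main

// Compare the clock against a certificate's validity window, the check
// that produces "x509: certificate has expired or is not yet valid".
import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-cert.pem") // placeholder path
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n", now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n", now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("valid until", cert.NotAfter.Format(time.RFC3339))
	}
}

The retry that follows hits the same expired certificate, so the node status patch keeps failing until the webhook's serving certificate is rotated.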
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:39Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.686911 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.687068 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.687093 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.687117 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.687138 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.703492 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:39Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.707975 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.708027 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.708042 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.708058 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.708070 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.723792 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:39Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.727262 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.727315 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.727327 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.727343 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.727355 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.737919 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:39Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.738052 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.739401 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.739424 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.739432 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.739444 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.739453 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.756043 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.756084 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.756145 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.756041 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.756253 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:39 crc kubenswrapper[4661]: E1001 05:30:39.756460 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.843175 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.843251 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.843275 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.843305 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.843328 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.947518 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.947577 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.947594 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.947616 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:39 crc kubenswrapper[4661]: I1001 05:30:39.947669 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:39Z","lastTransitionTime":"2025-10-01T05:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.051234 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.051302 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.051326 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.051357 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.051384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.154969 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.155044 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.155064 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.155094 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.155113 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.258217 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.258298 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.258325 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.258358 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.258384 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.361961 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.362002 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.362021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.362043 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.362059 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.465480 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.465560 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.465583 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.465612 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.465679 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.568669 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.568737 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.568755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.568781 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.568803 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.672261 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.672338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.672362 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.672397 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.672426 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.756738 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:40 crc kubenswrapper[4661]: E1001 05:30:40.756909 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.775175 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.775240 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.775258 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.775284 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.775300 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.878530 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.878614 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.878671 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.878702 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.878721 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.981921 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.982300 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.982320 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.982344 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:40 crc kubenswrapper[4661]: I1001 05:30:40.982361 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:40Z","lastTransitionTime":"2025-10-01T05:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.085005 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.085122 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.085143 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.085213 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.085231 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.187958 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.188021 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.188041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.188066 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.188083 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.291348 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.291425 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.291454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.291484 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.291507 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.393950 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.394001 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.394014 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.394030 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.394042 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.496291 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.496328 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.496339 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.496356 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.496367 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.598942 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.599015 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.599039 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.599068 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.599089 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.701831 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.701896 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.701918 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.701948 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.701970 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.756544 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.756621 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:41 crc kubenswrapper[4661]: E1001 05:30:41.756751 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.756772 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:41 crc kubenswrapper[4661]: E1001 05:30:41.756875 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:41 crc kubenswrapper[4661]: E1001 05:30:41.756928 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.791722 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e1804
3ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.805611 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.805717 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.805770 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.805797 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.805816 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.812823 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.833405 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.850608 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.868354 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.883833 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.899870 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.909524 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.909551 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.909559 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.909572 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.909581 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:41Z","lastTransitionTime":"2025-10-01T05:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.920490 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.935911 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.954052 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.969675 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.984845 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:41 crc kubenswrapper[4661]: I1001 05:30:41.998799 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:41Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.011747 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.011839 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.011856 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.011881 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.011898 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:42Z","lastTransitionTime":"2025-10-01T05:30:42Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.015157 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.026299 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.038314 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.047236 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.063412 4661 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:34Z\\\",\\\"message\\\":\\\"er.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:30:33.937914 6657 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:33.937969 6657 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938006 6657 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938159 6657 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938289 6657 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938383 6657 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.939909 6657 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 05:30:33.939973 6657 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 05:30:33.940016 6657 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:33.940019 6657 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 05:30:33.940045 6657 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 05:30:33.940067 6657 factory.go:656] Stopping watch factory\\\\nI1001 05:30:33.940084 6657 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:30:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.072997 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:42Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.113781 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.113873 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.113886 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.113911 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.113923 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:42Z","lastTransitionTime":"2025-10-01T05:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.216118 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.216176 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.216188 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.216203 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.216213 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:42Z","lastTransitionTime":"2025-10-01T05:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
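The repeated status-patch failures above share one root cause: the network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) lies before the node's current clock (2025-10-01T05:30:42Z), so Go's TLS verifier rejects every call. A minimal stdlib sketch of that validity-window check follows; it is a hypothetical standalone checker, not the kubelet's code, and the PEM path is an assumed command-line argument:

```go
// certwindow.go - reproduce the [NotBefore, NotAfter] check that makes the
// webhook calls above fail with "certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: certwindow <cert.pem>")
		os.Exit(2)
	}
	raw, err := os.ReadFile(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// The branch hit in this log: current time 2025-10-01T05:30:42Z is
		// after the certificate's NotAfter of 2025-08-24T17:21:41Z.
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Println("within validity window")
	}
}
```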
Oct 01 05:30:42 crc kubenswrapper[4661]: I1001 05:30:42.757063 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:42 crc kubenswrapper[4661]: E1001 05:30:42.757276 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.041699 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.041776 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.041796 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.041819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.041837 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:43Z","lastTransitionTime":"2025-10-01T05:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
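Each "Node became not ready" entry above embeds the node's Ready condition as a JSON object after condition=. A hypothetical stdlib helper (not part of the kubelet or this artifact's tooling) that pulls those objects out of a journal stream:

```go
// conditiongrep.go - extract the condition={...} payload from
// "Node became not ready" lines like the ones in this log.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
	"strings"
)

// nodeCondition mirrors the fields visible in the log payloads above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // journal lines can be long
	for sc.Scan() {
		line := sc.Text()
		i := strings.Index(line, "condition={")
		if i < 0 {
			continue
		}
		var c nodeCondition
		if err := json.Unmarshal([]byte(line[i+len("condition="):]), &c); err != nil {
			continue // skip truncated or non-JSON tails
		}
		fmt.Printf("%s=%s at %s: %s\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
	}
}
```

Fed something like `journalctl -u kubelet --no-pager | go run conditiongrep.go`, it would print one Ready=False transition per heartbeat in this excerpt.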
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.757054 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.757069 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:43 crc kubenswrapper[4661]: I1001 05:30:43.757197 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:43 crc kubenswrapper[4661]: E1001 05:30:43.757458 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:43 crc kubenswrapper[4661]: E1001 05:30:43.757600 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:43 crc kubenswrapper[4661]: E1001 05:30:43.757815 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.072891 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.072951 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.072969 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.072992 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.073013 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:44Z","lastTransitionTime":"2025-10-01T05:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:44 crc kubenswrapper[4661]: I1001 05:30:44.756301 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:44 crc kubenswrapper[4661]: E1001 05:30:44.756506 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.011271 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.011346 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.011370 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.011403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.011430 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:45Z","lastTransitionTime":"2025-10-01T05:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.756790 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:45 crc kubenswrapper[4661]: E1001 05:30:45.756918 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.756934 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:45 crc kubenswrapper[4661]: I1001 05:30:45.756997 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:45 crc kubenswrapper[4661]: E1001 05:30:45.757112 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:45 crc kubenswrapper[4661]: E1001 05:30:45.757174 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.045174 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.045248 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.045266 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.045294 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.045314 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:46Z","lastTransitionTime":"2025-10-01T05:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
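In the entries just below, the kubelet declines to restart ovnkube-controller with "back-off 40s restarting failed container". That matches Kubernetes' documented crash-loop backoff, which doubles from a 10s base (10s, 20s, 40s, ...) up to a 5-minute cap; the constants in this sketch come from the docs, not from this node's kubelet configuration:

```go
// backofftable.go - print the documented crash-loop backoff schedule;
// the "back-off 40s" recorded below is the third step.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	delay := base
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %s\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```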
Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.756377 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:46 crc kubenswrapper[4661]: E1001 05:30:46.756571 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.757898 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:30:46 crc kubenswrapper[4661]: E1001 05:30:46.758183 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.767569 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.767678 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.767703 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.767733 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.767750 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:46Z","lastTransitionTime":"2025-10-01T05:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.871101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.871166 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.871183 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.871205 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:46 crc kubenswrapper[4661]: I1001 05:30:46.871222 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:46Z","lastTransitionTime":"2025-10-01T05:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.756769 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.756853 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.756769 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:47 crc kubenswrapper[4661]: E1001 05:30:47.756956 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:47 crc kubenswrapper[4661]: E1001 05:30:47.757170 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:47 crc kubenswrapper[4661]: E1001 05:30:47.757321 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.797389 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.797441 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.797462 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.797486 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.797503 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:47Z","lastTransitionTime":"2025-10-01T05:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.899908 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.899963 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.899980 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.900002 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:47 crc kubenswrapper[4661]: I1001 05:30:47.900019 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:47Z","lastTransitionTime":"2025-10-01T05:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.756487 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:48 crc kubenswrapper[4661]: E1001 05:30:48.756686 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.839767 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.839839 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.839858 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.839883 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.839904 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:48Z","lastTransitionTime":"2025-10-01T05:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.942389 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.942430 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.942440 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.942454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:48 crc kubenswrapper[4661]: I1001 05:30:48.942465 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:48Z","lastTransitionTime":"2025-10-01T05:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.756402 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.756456 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.756468 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.756593 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.756731 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.756881 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
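Note which pods are skipped above: only pods that need the cluster network wait on NetworkReady, while host-network pods such as ovnkube-node keep running. A simplified Go sketch of that gating decision (the pod type and canCreateSandbox are stand-ins of ours, not the kubelet's actual API):

package main

import "fmt"

type pod struct {
	name        string
	hostNetwork bool
}

// canCreateSandbox mirrors the check visible in the log: sandbox creation
// for non-host-network pods is refused while the runtime network is not ready.
func canCreateSandbox(p pod, networkReady bool) error {
	if !p.hostNetwork && !networkReady {
		return fmt.Errorf("network is not ready: container runtime network not ready: NetworkReady=false")
	}
	return nil
}

func main() {
	pods := []pod{
		{name: "openshift-multus/network-metrics-daemon-rsrzg", hostNetwork: false},
		{name: "openshift-ovn-kubernetes/ovnkube-node-fj7kz", hostNetwork: true},
	}
	for _, p := range pods {
		if err := canCreateSandbox(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping: pod=%q err=%v\n", p.name, err)
		} else {
			fmt.Printf("pod %q may proceed (host network)\n", p.name)
		}
	}
}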
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.763574 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.763662 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.763688 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.763716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.763738 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.824849 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.824907 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.824927 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.824951 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.824969 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.846740 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.851930 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.851971 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.851993 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.852023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.852047 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.872043 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.876759 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.876807 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.876824 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.876846 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.876865 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.895504 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.906129 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.906199 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.906218 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.906247 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.906265 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.929342 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.934849 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.934943 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.934972 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.935003 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.935031 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.958473 4661 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3e4e6ba7-0055-4484-bdb1-7c7b39829e51\\\",\\\"systemUUID\\\":\\\"5955e218-6cd8-4aae-9fe3-15f4479360e0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:49Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:49 crc kubenswrapper[4661]: E1001 05:30:49.958899 4661 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.961287 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.961356 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.961376 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.961403 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:49 crc kubenswrapper[4661]: I1001 05:30:49.961428 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:49Z","lastTransitionTime":"2025-10-01T05:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.064762 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.064819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.064833 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.064853 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.064868 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.167940 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.168537 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.168712 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.168797 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.168860 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.271179 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.271242 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.271254 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.271271 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.271285 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.374114 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.374408 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.374594 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.374763 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.374894 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.480175 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.480529 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.480709 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.480936 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.481159 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.585152 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.585213 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.585231 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.585256 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.585276 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.687558 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.687608 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.687624 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.687716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.687733 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.756173 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:50 crc kubenswrapper[4661]: E1001 05:30:50.756364 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.790467 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.790509 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.790526 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.790547 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.790564 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.893268 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.893335 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.893359 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.893387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.893411 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.996490 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.996556 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.996575 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.996606 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:50 crc kubenswrapper[4661]: I1001 05:30:50.996845 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:50Z","lastTransitionTime":"2025-10-01T05:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.100143 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.100203 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.100220 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.100247 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.100265 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.203040 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.203085 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.203101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.203123 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.203140 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.340304 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.340381 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.340401 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.340431 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.340461 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.442684 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.442819 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.442845 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.442873 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.442892 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.546128 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.546187 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.546207 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.546230 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.546249 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.649448 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.649515 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.649533 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.649558 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.649575 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.752333 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.752367 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.752375 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.752387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.752397 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.756939 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.757023 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.757132 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:51 crc kubenswrapper[4661]: E1001 05:30:51.757194 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:51 crc kubenswrapper[4661]: E1001 05:30:51.757365 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:51 crc kubenswrapper[4661]: E1001 05:30:51.757462 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.777936 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.800257 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-l96mp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc3b0e2f-f27e-4420-9323-ec45878c11a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:23Z\\\",\\\"message\\\":\\\"2025-10-01T05:29:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093\\\\n2025-10-01T05:29:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f7b7da8c-4dc1-4303-a2fa-0dc4b3805093 to /host/opt/cni/bin/\\\\n2025-10-01T05:29:38Z [verbose] multus-daemon started\\\\n2025-10-01T05:29:38Z [verbose] Readiness Indicator file check\\\\n2025-10-01T05:30:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dh82c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-l96mp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.815242 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99fd72d1-e663-4a9c-829e-09b932a6f732\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f1c83eafd1b7d901067b18b7c1de9dff36702bf62007bce450fc3535a150d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae1bac5d1c6f44bae0b9c7f3b0399b169f8db30ab6f50e0368067006caf4f4c1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4acbf15769fd67d5b0841fa4e10d870c23ccb96d51faf407170fab2d0e359fde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6789b92b2846d32f973da4a5ce8a9cd6e2fc313c62c6b48f95e9cacbc631badc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c7e636b58b6a219f7a532be1f7445ef4cc5be34e24e31fb5b13f1e473a7e22d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d46849a47b3a87ac4e9fba1f4b5eed03a08f5de2c9ec24ee63f8eb12485f72f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.835120 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeeba91c97125281bc0b5f8d5bffd51a110f08bb7352ab8dafe4e9961cc59177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.854933 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.859793 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.860107 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.860131 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.860399 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.860412 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.881930 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9da809bbe97d19dc7767a221d106932ee69af57ae616e40563d70bd3f5858c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f54721dad4e5bee37734cc7f567f95f2954296652eb88f43ae30fddab927d8ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.907783 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6834e918-6be2-4c19-ac03-80fa36a2659c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T05:30:34Z\\\",\\\"message\\\":\\\"er.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 05:30:33.937914 6657 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 05:30:33.937969 6657 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938006 6657 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938159 6657 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938289 6657 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.938383 6657 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 05:30:33.939909 6657 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 05:30:33.939973 6657 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 05:30:33.940016 6657 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 05:30:33.940019 6657 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 05:30:33.940045 6657 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 05:30:33.940067 6657 factory.go:656] Stopping watch factory\\\\nI1001 05:30:33.940084 6657 ovnkube.go:599] Stopped ovnkube\\\\nI1001 05:30:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T05:30:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqhtk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fj7kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.927344 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56b04919-b144-4049-8ccd-e6de8aaa48a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8680402d36512dd777473cbf20e799044a1917c00862bb415c6743e7e0eeb078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a532bf2b73ecc0de4b459e1b39f05fcc3a989c37ac4be03ac2d695a65598223\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xh556\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mlbtk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.942458 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed9774173a4c4d86bcc749b0d31d5dfeaa46e989a6ac700aca603068a43704e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.955416 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-slmf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"584b2672-fbcc-4c9c-9ead-fdf45d9d1fff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2878a50b9a1d233b4dc8b5deadca2ac970adb452ff64cec44916d3f2fd486a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qjvpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-slmf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.963745 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.963832 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.963850 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.963875 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.963893 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:51Z","lastTransitionTime":"2025-10-01T05:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.977807 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cqptt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b037fd65-42ce-46b5-991d-d643006e1acf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b30f65e9b0aa148d8e0d310973d335c3f8b8b2fa626dc33d96a84e864984d9ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4034599814c45a66f43486d382c9978b7ed370b58623a1dcc87d7caee811b90\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7993dc19c9d236300ff8c7dd9b973e45a441812b5ca5c498a49cf5267900247\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99db63dff51ef4b5446cb96d12481afb5a29f933e1d13882b4fc41f11088888\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c650fc6e2019b763d25029c5f6f857259c13a74b8e1e85ac622581867e687a2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-01T05:29:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3534b5c03b51953ddf0928db8784a4ce05d1c16109f2d0bd22242cc6322150d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76734abdede5db51670e2e4ccede65c00b0a6acbd1574679bc0de7b40e8db782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ss4n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cqptt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 
2025-08-24T17:21:41Z" Oct 01 05:30:51 crc kubenswrapper[4661]: I1001 05:30:51.993067 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7584c4bc-4202-487e-a2b4-4319f428a792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cedb7e05ebae0a91f63671ef24e22b14e482bebdc4ca31542682dada3636f8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bd8c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wp2wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:51Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.019489 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79bc080f-ea10-4f4d-a526-2a071044e89b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3905c71a700bffc92a111289f1dbaa2f4d5dde64089e03527da034e1c8d8297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a5e335c43b1e14aa1692a9ee6dbe1c777780aadd57cfb2d47ad0f6678f23860\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2fd39477bb4e158b4c1a370b909f79682b263e33fcc93dfe1674ab17883b139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://773e22951f0b9683f1904253079ec107033e1be3d92efa8a8180995bb417e4bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d013b02289da13422725d4d905772b289a8bf8bce91ac7534a4b0ea2afadb81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9ed121a83d9eabc1d7646e5272c993da17d37ceb07edaf293c7d6c498f82d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//afa3e54c4f3e18043ce080c24e5e6685150547180a57cc0134cd42e53dc1d131\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbd00a6ebc05ef2ff5b3b986e01719b60ba623c42c801b438312a584770459f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.035785 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dc0cf663-e1f4-4056-b5f7-27451bb25e57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a57fb2ee2dd71563789bbacbf484fa6d89477ba7254c5678f5db049be6ab396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b946570ccad4dcbcc410851def700310c8203f7d11b96450bcab694b2af907da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://594db2c2086dfdb7098491a9bed55902c2a4bfc1b0c87f476d55ea93c37f930e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c05cbfb383b24ac676c0c05ed962c4fe918fa21913215a77ae5498d2624914c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.045681 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:52 crc kubenswrapper[4661]: E1001 05:30:52.045818 4661 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:52 crc kubenswrapper[4661]: E1001 05:30:52.045869 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs podName:6f05366d-d4ea-4cf0-b2cf-3a787dca8115 nodeName:}" failed. No retries permitted until 2025-10-01 05:31:56.045856199 +0000 UTC m=+164.983834813 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs") pod "network-metrics-daemon-rsrzg" (UID: "6f05366d-d4ea-4cf0-b2cf-3a787dca8115") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.049284 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.062493 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4fc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rsrzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.066570 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.066672 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.066692 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.066718 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.066736 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:52Z","lastTransitionTime":"2025-10-01T05:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.075038 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"959c16a6-1b6f-4dfe-af76-d74d00198a19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d44275c6d9c54d40ca33dac51d37ca7a9784345b0890601d797c47f4a31e17ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a86e8d41edfedbf8a5f0c04d2a0f52c06dbfdefbcdf7c0fe37bc5fd151ffccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ddc9d31f544f57f371c09f3655228bbb0be49788003ae551815a758f444e7d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ecd6f8e84ed2290e5ce19523d181978f85051755ef31887c11770987e7c41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.086372 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58da8ff7-e427-4b9d-b7db-acdb2ee9dc2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d0b1154fb43152360c4b932cb380bcfd9d026b840c24371cb7c9c1229e45e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://407cbc7364ba0fb345a3fb2b8f0af7c32d9692bd0dd5f7f91a653a3f38715361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T05:29:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T05:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.098417 4661 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-f8vtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4783f8c4-bc93-4f21-b88f-62167f7ec68b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T05:29:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5162c9befba9d0a3c2c7c5e57a54314481c49d02169327fdfcadc1fcd69a287d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T05:29:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmx9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T05:29:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-f8vtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T05:30:52Z is after 2025-08-24T17:21:41Z" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.169352 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.169429 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.169448 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.169481 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.169500 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:52Z","lastTransitionTime":"2025-10-01T05:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
Oct 01 05:30:52 crc kubenswrapper[4661]: I1001 05:30:52.757024 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:52 crc kubenswrapper[4661]: E1001 05:30:52.757261 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.100369 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.100436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.100460 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.100485 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.100504 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:53Z","lastTransitionTime":"2025-10-01T05:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.756354 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.756408 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:53 crc kubenswrapper[4661]: E1001 05:30:53.756538 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:53 crc kubenswrapper[4661]: I1001 05:30:53.756561 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:53 crc kubenswrapper[4661]: E1001 05:30:53.756731 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:53 crc kubenswrapper[4661]: E1001 05:30:53.756955 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.036563 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.036694 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.036720 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.036755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.036776 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:54Z","lastTransitionTime":"2025-10-01T05:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:54 crc kubenswrapper[4661]: I1001 05:30:54.756218 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:54 crc kubenswrapper[4661]: E1001 05:30:54.756715 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.072745 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.072831 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.072892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.072926 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.072967 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:55Z","lastTransitionTime":"2025-10-01T05:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.755921 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.755962 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:30:55 crc kubenswrapper[4661]: E1001 05:30:55.756146 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:30:55 crc kubenswrapper[4661]: I1001 05:30:55.756267 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:30:55 crc kubenswrapper[4661]: E1001 05:30:55.756453 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:30:55 crc kubenswrapper[4661]: E1001 05:30:55.756520 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.002844 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.002891 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.002900 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.002914 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.002926 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:56Z","lastTransitionTime":"2025-10-01T05:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 05:30:56 crc kubenswrapper[4661]: I1001 05:30:56.756813 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:30:56 crc kubenswrapper[4661]: E1001 05:30:56.757216 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.052598 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.052705 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.052726 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.052752 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.052769 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.155811 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.155888 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.155907 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.155933 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.155953 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.259094 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.259141 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.259154 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.259172 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.259186 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.361390 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.361436 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.361452 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.361472 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.361487 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.464421 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.464472 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.464489 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.464513 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.464535 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.568198 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.568259 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.568278 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.568309 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.568327 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.671990 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.672058 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.672080 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.672104 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.672121 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.757059 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.757191 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:57 crc kubenswrapper[4661]: E1001 05:30:57.757396 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.757483 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:57 crc kubenswrapper[4661]: E1001 05:30:57.758425 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:57 crc kubenswrapper[4661]: E1001 05:30:57.758672 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.758994 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:30:57 crc kubenswrapper[4661]: E1001 05:30:57.759261 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.774708 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.774759 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.774778 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.774800 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.774818 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.877615 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.877716 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.877742 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.877771 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.877796 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.980549 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.980599 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.980609 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.980627 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:57 crc kubenswrapper[4661]: I1001 05:30:57.980659 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:57Z","lastTransitionTime":"2025-10-01T05:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.085959 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.086016 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.086033 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.086059 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.086153 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.189746 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.189808 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.189828 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.189852 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.189869 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.293375 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.293447 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.293471 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.293502 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.293524 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.396466 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.396536 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.396555 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.396581 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.396599 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.499227 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.499313 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.499338 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.499372 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.499395 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.605860 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.605909 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.605927 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.605949 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.605968 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.709324 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.709402 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.709425 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.709454 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.709482 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.756329 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:30:58 crc kubenswrapper[4661]: E1001 05:30:58.756533 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.816041 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.816126 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.816159 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.816191 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.816210 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.919028 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.919101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.919116 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.919142 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:58 crc kubenswrapper[4661]: I1001 05:30:58.919160 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:58Z","lastTransitionTime":"2025-10-01T05:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.022892 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.022981 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.023007 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.023042 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.023062 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.126413 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.126482 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.126500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.126526 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.126546 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.228939 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.228990 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.229008 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.229028 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.229045 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.332337 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.333760 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.333835 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.333866 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.333908 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.437438 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.437492 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.437511 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.437535 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.437555 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.540438 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.540500 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.540517 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.540544 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.540562 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.643407 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.643470 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.643488 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.643515 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.643533 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.746954 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.747029 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.747052 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.747076 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.747093 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.756474 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.756494 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.756730 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:30:59 crc kubenswrapper[4661]: E1001 05:30:59.757181 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:30:59 crc kubenswrapper[4661]: E1001 05:30:59.757301 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:30:59 crc kubenswrapper[4661]: E1001 05:30:59.757382 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.850091 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.850155 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.850172 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.850203 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.850221 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.954023 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.954085 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.954101 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.954127 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:30:59 crc kubenswrapper[4661]: I1001 05:30:59.954146 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:30:59Z","lastTransitionTime":"2025-10-01T05:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.057607 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.057729 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.057755 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.057786 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.057806 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:31:00Z","lastTransitionTime":"2025-10-01T05:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.088263 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.088342 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.088362 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.088387 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.088407 4661 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T05:31:00Z","lastTransitionTime":"2025-10-01T05:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.154840 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv"] Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.156210 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.159286 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.159390 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.159900 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.165125 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.227920 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-slmf7" podStartSLOduration=88.227902203 podStartE2EDuration="1m28.227902203s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.200741068 +0000 UTC m=+109.138719742" watchObservedRunningTime="2025-10-01 05:31:00.227902203 +0000 UTC m=+109.165880817" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.228165 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-cqptt" podStartSLOduration=88.22815865 podStartE2EDuration="1m28.22815865s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.227507553 +0000 UTC m=+109.165486167" watchObservedRunningTime="2025-10-01 05:31:00.22815865 +0000 UTC m=+109.166137264" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241039 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65028585-fab8-476e-83a7-609b69d0b19d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241233 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241352 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241416 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/65028585-fab8-476e-83a7-609b69d0b19d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241444 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65028585-fab8-476e-83a7-609b69d0b19d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.241917 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podStartSLOduration=88.241899712 podStartE2EDuration="1m28.241899712s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.241896462 +0000 UTC m=+109.179875086" watchObservedRunningTime="2025-10-01 05:31:00.241899712 +0000 UTC m=+109.179878356" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.309092 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mlbtk" podStartSLOduration=87.3090666 podStartE2EDuration="1m27.3090666s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.300425236 +0000 UTC m=+109.238403890" watchObservedRunningTime="2025-10-01 05:31:00.3090666 +0000 UTC m=+109.247045254" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.338244 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=90.338212258 podStartE2EDuration="1m30.338212258s" podCreationTimestamp="2025-10-01 05:29:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.337924251 +0000 UTC m=+109.275902905" watchObservedRunningTime="2025-10-01 05:31:00.338212258 +0000 UTC m=+109.276190882" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.343614 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.343719 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65028585-fab8-476e-83a7-609b69d0b19d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.343767 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/65028585-fab8-476e-83a7-609b69d0b19d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.343841 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65028585-fab8-476e-83a7-609b69d0b19d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.343903 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.344014 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.344093 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65028585-fab8-476e-83a7-609b69d0b19d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.345806 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65028585-fab8-476e-83a7-609b69d0b19d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.357969 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65028585-fab8-476e-83a7-609b69d0b19d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.369507 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65028585-fab8-476e-83a7-609b69d0b19d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hckbv\" (UID: \"65028585-fab8-476e-83a7-609b69d0b19d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.396378 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.396358052 podStartE2EDuration="1m30.396358052s" podCreationTimestamp="2025-10-01 
05:29:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.363074991 +0000 UTC m=+109.301053645" watchObservedRunningTime="2025-10-01 05:31:00.396358052 +0000 UTC m=+109.334336686" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.422204 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=56.422185361 podStartE2EDuration="56.422185361s" podCreationTimestamp="2025-10-01 05:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.421838972 +0000 UTC m=+109.359817596" watchObservedRunningTime="2025-10-01 05:31:00.422185361 +0000 UTC m=+109.360163985" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.432022 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=34.432003767 podStartE2EDuration="34.432003767s" podCreationTimestamp="2025-10-01 05:30:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.431517705 +0000 UTC m=+109.369496329" watchObservedRunningTime="2025-10-01 05:31:00.432003767 +0000 UTC m=+109.369982391" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.444927 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-f8vtz" podStartSLOduration=88.444904956 podStartE2EDuration="1m28.444904956s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.444348771 +0000 UTC m=+109.382327395" watchObservedRunningTime="2025-10-01 05:31:00.444904956 +0000 UTC m=+109.382883580" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.473247 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=91.473222463 podStartE2EDuration="1m31.473222463s" podCreationTimestamp="2025-10-01 05:29:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.471722572 +0000 UTC m=+109.409701216" watchObservedRunningTime="2025-10-01 05:31:00.473222463 +0000 UTC m=+109.411201087" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.477580 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.562072 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-l96mp" podStartSLOduration=88.562049497 podStartE2EDuration="1m28.562049497s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:00.561971515 +0000 UTC m=+109.499950139" watchObservedRunningTime="2025-10-01 05:31:00.562049497 +0000 UTC m=+109.500028131" Oct 01 05:31:00 crc kubenswrapper[4661]: I1001 05:31:00.756596 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:00 crc kubenswrapper[4661]: E1001 05:31:00.756839 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:01 crc kubenswrapper[4661]: I1001 05:31:01.411134 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" event={"ID":"65028585-fab8-476e-83a7-609b69d0b19d","Type":"ContainerStarted","Data":"7f8d55bd9a37e6af7a4369c77d7c79bc9e5da4ad2d3e3ec3352d8a9f979e7239"} Oct 01 05:31:01 crc kubenswrapper[4661]: I1001 05:31:01.411198 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" event={"ID":"65028585-fab8-476e-83a7-609b69d0b19d","Type":"ContainerStarted","Data":"1c0b2922b371abc6db5896bdae4a762c8b60cedbd2b923640913ed0ba7c7ffa3"} Oct 01 05:31:01 crc kubenswrapper[4661]: I1001 05:31:01.756345 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:01 crc kubenswrapper[4661]: E1001 05:31:01.758238 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:01 crc kubenswrapper[4661]: I1001 05:31:01.758256 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:01 crc kubenswrapper[4661]: E1001 05:31:01.758373 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:01 crc kubenswrapper[4661]: I1001 05:31:01.758282 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:01 crc kubenswrapper[4661]: E1001 05:31:01.758552 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:02 crc kubenswrapper[4661]: I1001 05:31:02.756161 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:02 crc kubenswrapper[4661]: E1001 05:31:02.756331 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:03 crc kubenswrapper[4661]: I1001 05:31:03.756098 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:03 crc kubenswrapper[4661]: I1001 05:31:03.756113 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:03 crc kubenswrapper[4661]: E1001 05:31:03.756786 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:03 crc kubenswrapper[4661]: I1001 05:31:03.756185 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:03 crc kubenswrapper[4661]: E1001 05:31:03.756934 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:03 crc kubenswrapper[4661]: E1001 05:31:03.757081 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:04 crc kubenswrapper[4661]: I1001 05:31:04.756871 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:04 crc kubenswrapper[4661]: E1001 05:31:04.757056 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:05 crc kubenswrapper[4661]: I1001 05:31:05.755983 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:05 crc kubenswrapper[4661]: I1001 05:31:05.755993 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:05 crc kubenswrapper[4661]: E1001 05:31:05.756504 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:05 crc kubenswrapper[4661]: I1001 05:31:05.756041 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:05 crc kubenswrapper[4661]: E1001 05:31:05.756379 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:05 crc kubenswrapper[4661]: E1001 05:31:05.756664 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:06 crc kubenswrapper[4661]: I1001 05:31:06.756775 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:06 crc kubenswrapper[4661]: E1001 05:31:06.756957 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:07 crc kubenswrapper[4661]: I1001 05:31:07.756654 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:07 crc kubenswrapper[4661]: I1001 05:31:07.756743 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:07 crc kubenswrapper[4661]: E1001 05:31:07.756869 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:07 crc kubenswrapper[4661]: E1001 05:31:07.757050 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:07 crc kubenswrapper[4661]: I1001 05:31:07.757940 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:07 crc kubenswrapper[4661]: E1001 05:31:07.758234 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:08 crc kubenswrapper[4661]: I1001 05:31:08.756160 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:08 crc kubenswrapper[4661]: E1001 05:31:08.756332 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:09 crc kubenswrapper[4661]: I1001 05:31:09.756751 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:09 crc kubenswrapper[4661]: I1001 05:31:09.756937 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:09 crc kubenswrapper[4661]: I1001 05:31:09.757198 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:09 crc kubenswrapper[4661]: E1001 05:31:09.757189 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:09 crc kubenswrapper[4661]: E1001 05:31:09.757761 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:09 crc kubenswrapper[4661]: E1001 05:31:09.757967 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:09 crc kubenswrapper[4661]: I1001 05:31:09.758367 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:31:09 crc kubenswrapper[4661]: E1001 05:31:09.758664 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fj7kz_openshift-ovn-kubernetes(6834e918-6be2-4c19-ac03-80fa36a2659c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.445935 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/1.log" Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.446531 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/0.log" Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.446589 4661 generic.go:334] "Generic (PLEG): container finished" podID="dc3b0e2f-f27e-4420-9323-ec45878c11a6" containerID="9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51" exitCode=1 Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.446689 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerDied","Data":"9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51"} Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.446806 4661 scope.go:117] "RemoveContainer" containerID="caa18a756d967af8e77e028c2713e5e44fb06e48be1d671dfa18269e1520768f" Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.447589 4661 scope.go:117] "RemoveContainer" containerID="9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51" Oct 01 05:31:10 crc kubenswrapper[4661]: E1001 05:31:10.448003 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-l96mp_openshift-multus(dc3b0e2f-f27e-4420-9323-ec45878c11a6)\"" pod="openshift-multus/multus-l96mp" podUID="dc3b0e2f-f27e-4420-9323-ec45878c11a6" Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.481339 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hckbv" podStartSLOduration=98.48130729 podStartE2EDuration="1m38.48130729s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:01.424094029 +0000 UTC m=+110.362072703" watchObservedRunningTime="2025-10-01 05:31:10.48130729 
Oct 01 05:31:10 crc kubenswrapper[4661]: I1001 05:31:10.756552 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:10 crc kubenswrapper[4661]: E1001 05:31:10.756802 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:31:11 crc kubenswrapper[4661]: I1001 05:31:11.451690 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/1.log"
Oct 01 05:31:11 crc kubenswrapper[4661]: E1001 05:31:11.723508 4661 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Oct 01 05:31:11 crc kubenswrapper[4661]: I1001 05:31:11.756338 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:31:11 crc kubenswrapper[4661]: I1001 05:31:11.756387 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:11 crc kubenswrapper[4661]: I1001 05:31:11.756510 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:11 crc kubenswrapper[4661]: E1001 05:31:11.758217 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:31:11 crc kubenswrapper[4661]: E1001 05:31:11.758412 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:31:11 crc kubenswrapper[4661]: E1001 05:31:11.758587 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:31:11 crc kubenswrapper[4661]: E1001 05:31:11.835719 4661 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 01 05:31:12 crc kubenswrapper[4661]: I1001 05:31:12.756179 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:12 crc kubenswrapper[4661]: E1001 05:31:12.756412 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:31:13 crc kubenswrapper[4661]: I1001 05:31:13.756054 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:13 crc kubenswrapper[4661]: I1001 05:31:13.756276 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:31:13 crc kubenswrapper[4661]: E1001 05:31:13.756354 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:31:13 crc kubenswrapper[4661]: E1001 05:31:13.756534 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:31:13 crc kubenswrapper[4661]: I1001 05:31:13.756621 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:13 crc kubenswrapper[4661]: E1001 05:31:13.756767 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:31:14 crc kubenswrapper[4661]: I1001 05:31:14.756518 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:14 crc kubenswrapper[4661]: E1001 05:31:14.756745 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:31:15 crc kubenswrapper[4661]: I1001 05:31:15.756928 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:31:15 crc kubenswrapper[4661]: I1001 05:31:15.757044 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:15 crc kubenswrapper[4661]: E1001 05:31:15.757160 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:31:15 crc kubenswrapper[4661]: E1001 05:31:15.757234 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:31:15 crc kubenswrapper[4661]: I1001 05:31:15.757313 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:15 crc kubenswrapper[4661]: E1001 05:31:15.757487 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:31:16 crc kubenswrapper[4661]: I1001 05:31:16.756141 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:16 crc kubenswrapper[4661]: E1001 05:31:16.756304 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:31:16 crc kubenswrapper[4661]: E1001 05:31:16.836968 4661 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 01 05:31:17 crc kubenswrapper[4661]: I1001 05:31:17.756192 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:31:17 crc kubenswrapper[4661]: I1001 05:31:17.756747 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:17 crc kubenswrapper[4661]: I1001 05:31:17.756782 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:17 crc kubenswrapper[4661]: E1001 05:31:17.756937 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115"
Oct 01 05:31:17 crc kubenswrapper[4661]: E1001 05:31:17.757079 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 05:31:17 crc kubenswrapper[4661]: E1001 05:31:17.757191 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 05:31:18 crc kubenswrapper[4661]: I1001 05:31:18.756236 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:18 crc kubenswrapper[4661]: E1001 05:31:18.756365 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 05:31:19 crc kubenswrapper[4661]: I1001 05:31:19.755979 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:19 crc kubenswrapper[4661]: I1001 05:31:19.756095 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:19 crc kubenswrapper[4661]: I1001 05:31:19.756199 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg"
Oct 01 05:31:19 crc kubenswrapper[4661]: E1001 05:31:19.756198 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:19 crc kubenswrapper[4661]: E1001 05:31:19.756366 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:19 crc kubenswrapper[4661]: E1001 05:31:19.756506 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:20 crc kubenswrapper[4661]: I1001 05:31:20.756112 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:20 crc kubenswrapper[4661]: E1001 05:31:20.756303 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:20 crc kubenswrapper[4661]: I1001 05:31:20.757404 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.491745 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/3.log" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.494908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerStarted","Data":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.495344 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.530403 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podStartSLOduration=108.530382582 podStartE2EDuration="1m48.530382582s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:21.528883282 +0000 UTC m=+130.466861896" watchObservedRunningTime="2025-10-01 05:31:21.530382582 +0000 UTC m=+130.468361236" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.756895 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.757000 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:21 crc kubenswrapper[4661]: E1001 05:31:21.758621 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.758681 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:21 crc kubenswrapper[4661]: E1001 05:31:21.758854 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:21 crc kubenswrapper[4661]: E1001 05:31:21.759060 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:21 crc kubenswrapper[4661]: I1001 05:31:21.783596 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rsrzg"] Oct 01 05:31:21 crc kubenswrapper[4661]: E1001 05:31:21.838601 4661 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 05:31:22 crc kubenswrapper[4661]: I1001 05:31:22.497728 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:22 crc kubenswrapper[4661]: E1001 05:31:22.498406 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:22 crc kubenswrapper[4661]: I1001 05:31:22.756753 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:22 crc kubenswrapper[4661]: E1001 05:31:22.756942 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:22 crc kubenswrapper[4661]: I1001 05:31:22.757554 4661 scope.go:117] "RemoveContainer" containerID="9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51" Oct 01 05:31:23 crc kubenswrapper[4661]: I1001 05:31:23.505528 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/1.log" Oct 01 05:31:23 crc kubenswrapper[4661]: I1001 05:31:23.505616 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerStarted","Data":"c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e"} Oct 01 05:31:23 crc kubenswrapper[4661]: I1001 05:31:23.756388 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:23 crc kubenswrapper[4661]: E1001 05:31:23.756569 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:23 crc kubenswrapper[4661]: I1001 05:31:23.756693 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:23 crc kubenswrapper[4661]: E1001 05:31:23.756864 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:24 crc kubenswrapper[4661]: I1001 05:31:24.756692 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:24 crc kubenswrapper[4661]: I1001 05:31:24.756779 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:24 crc kubenswrapper[4661]: E1001 05:31:24.756882 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:24 crc kubenswrapper[4661]: E1001 05:31:24.757006 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:25 crc kubenswrapper[4661]: I1001 05:31:25.756948 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:25 crc kubenswrapper[4661]: E1001 05:31:25.757139 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 05:31:25 crc kubenswrapper[4661]: I1001 05:31:25.757224 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:25 crc kubenswrapper[4661]: E1001 05:31:25.757430 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 05:31:26 crc kubenswrapper[4661]: I1001 05:31:26.756899 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:26 crc kubenswrapper[4661]: I1001 05:31:26.756915 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:26 crc kubenswrapper[4661]: E1001 05:31:26.757109 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rsrzg" podUID="6f05366d-d4ea-4cf0-b2cf-3a787dca8115" Oct 01 05:31:26 crc kubenswrapper[4661]: E1001 05:31:26.757270 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.756570 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.756673 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.760256 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.760532 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.761071 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 01 05:31:27 crc kubenswrapper[4661]: I1001 05:31:27.761197 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 01 05:31:28 crc kubenswrapper[4661]: I1001 05:31:28.756811 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:31:28 crc kubenswrapper[4661]: I1001 05:31:28.756812 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:28 crc kubenswrapper[4661]: I1001 05:31:28.759363 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 01 05:31:28 crc kubenswrapper[4661]: I1001 05:31:28.759793 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 01 05:31:29 crc kubenswrapper[4661]: I1001 05:31:29.638182 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.887566 4661 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.966745 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"] Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.967447 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.969188 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gtlzc"] Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.969841 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.971700 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.972875 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.974740 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.975058 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.975677 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj"] Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.976188 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.979172 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.979723 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.979959 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.980158 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.982150 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.982685 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 01 05:31:30 crc kubenswrapper[4661]: I1001 05:31:30.982746 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.004472 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.005384 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.005575 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.005754 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.005929 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.006137 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 
05:31:31.006286 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.006425 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.006618 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.006776 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.007130 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.007269 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.007977 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.010147 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.010476 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.010695 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.011001 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.011482 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.011500 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.011495 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.012716 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-222p4"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.013180 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.013712 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.014185 4661 util.go:30] "No sandbox for pod can be found. 
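The burst of "Caches populated" entries above comes from client-go's reflector: for each Secret and ConfigMap a newly added pod references, the kubelet starts a watch scoped to that single object, rendered in the log as object-"namespace"/"name". A small sketch parsing that rendering back into its parts (the format is taken from the lines above; the helper itself is hypothetical):

package main

import (
	"fmt"
	"strings"
)

// parseObjectRef splits a reflector source such as
// `object-"openshift-apiserver"/"config"` into namespace and name.
func parseObjectRef(s string) (namespace, name string, ok bool) {
	s = strings.TrimPrefix(s, "object-")
	parts := strings.SplitN(s, "/", 2)
	if len(parts) != 2 {
		return "", "", false
	}
	return strings.Trim(parts[0], `"`), strings.Trim(parts[1], `"`), true
}

func main() {
	fmt.Println(parseObjectRef(`object-"openshift-apiserver"/"config"`))
	// openshift-apiserver config true
}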
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.016703 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-sppsj"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.017374 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018337 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d90b05-9404-455e-9bc4-17f416ed4b27-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018393 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-audit\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018424 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d90b05-9404-455e-9bc4-17f416ed4b27-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018456 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-serving-cert\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018487 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn6jx\" (UniqueName: \"kubernetes.io/projected/44d90b05-9404-455e-9bc4-17f416ed4b27-kube-api-access-gn6jx\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018516 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-policies\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018543 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018565 
4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-serving-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018586 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-node-pullsecrets\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018605 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-serving-cert\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018626 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-client\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018678 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-dir\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018701 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-encryption-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018730 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-client\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018755 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018770 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.018777 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kcdc\" (UniqueName: \"kubernetes.io/projected/021ef5bd-da08-44a2-8cf9-47cae92d4c47-kube-api-access-9kcdc\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019375 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-image-import-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019399 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgb5k\" (UniqueName: \"kubernetes.io/projected/7824f777-df2b-46ba-a9ea-6a428351d121-kube-api-access-cgb5k\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019414 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019432 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-audit-dir\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019452 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019471 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-encryption-config\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.019486 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.024861 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.025476 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026050 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026222 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nwldz"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026652 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nwldz" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026723 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026785 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.026929 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.029938 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.030343 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.030696 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.030945 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.031597 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.031652 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.031808 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.031912 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032185 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032302 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032331 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032455 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032601 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032622 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032785 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032930 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033076 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033326 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033332 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034449 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033538 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033582 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033628 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033715 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.033851 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.032469 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034303 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034334 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034655 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034696 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.034847 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.045583 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.045847 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.046956 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.047134 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.050298 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.050578 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.050890 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.051099 4661 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.051785 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.051813 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.051873 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.052054 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.052786 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.052824 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.052957 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.052953 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053284 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053395 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053408 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053519 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053546 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053664 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053425 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.053850 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054051 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054308 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054353 4661 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054380 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054355 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054596 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.054937 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.055883 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.056015 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.056430 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.056447 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.056931 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.068010 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.068363 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.069228 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2bwjb"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.072178 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-shn64"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.073316 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.074687 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.075912 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.076399 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2nj65"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.077012 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.079062 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.079652 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.080144 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2nj65" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.080506 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.080770 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.080894 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.080956 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.083293 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.084345 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.084916 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.085421 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.085577 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.086197 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.086353 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.087303 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.087511 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.095693 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.096180 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nkksl"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.096573 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.097055 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.097296 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.097833 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.097965 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.098513 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.098605 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.120791 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.120897 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.120932 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-encryption-config\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.120978 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d90b05-9404-455e-9bc4-17f416ed4b27-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121009 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-audit\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121033 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d90b05-9404-455e-9bc4-17f416ed4b27-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121054 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-serving-cert\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121071 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn6jx\" (UniqueName: \"kubernetes.io/projected/44d90b05-9404-455e-9bc4-17f416ed4b27-kube-api-access-gn6jx\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121095 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-policies\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121116 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-serving-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121136 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121155 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-node-pullsecrets\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121169 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-serving-cert\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121188 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-client\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121295 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-dir\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121320 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-encryption-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121353 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-client\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121371 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " 
pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121394 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kcdc\" (UniqueName: \"kubernetes.io/projected/021ef5bd-da08-44a2-8cf9-47cae92d4c47-kube-api-access-9kcdc\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121415 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-image-import-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121449 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgb5k\" (UniqueName: \"kubernetes.io/projected/7824f777-df2b-46ba-a9ea-6a428351d121-kube-api-access-cgb5k\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121470 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-audit-dir\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.121493 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.122011 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.122353 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.122580 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-dir\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.122892 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-audit\") pod \"apiserver-76f77b778f-gtlzc\" (UID: 
\"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.123194 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-node-pullsecrets\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.123239 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/021ef5bd-da08-44a2-8cf9-47cae92d4c47-audit-policies\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.123431 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d90b05-9404-455e-9bc4-17f416ed4b27-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.124229 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.128678 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-serving-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.130173 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.132797 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d90b05-9404-455e-9bc4-17f416ed4b27-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.133179 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.134407 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-serving-cert\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.134537 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-image-import-ca\") pod \"apiserver-76f77b778f-gtlzc\" (UID: 
\"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.134609 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7824f777-df2b-46ba-a9ea-6a428351d121-audit-dir\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.136215 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7824f777-df2b-46ba-a9ea-6a428351d121-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.139448 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-encryption-config\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.141364 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-etcd-client\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.147143 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.147460 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.152007 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-serving-cert\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.152329 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7824f777-df2b-46ba-a9ea-6a428351d121-encryption-config\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.153173 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/021ef5bd-da08-44a2-8cf9-47cae92d4c47-etcd-client\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.157886 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158341 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qz8x2"] 
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158599 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158776 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158799 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158884 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158897 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.158969 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.159014 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.159060 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.159839 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.160435 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.162796 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.163132 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.163142 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.163209 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.163763 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.164075 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.164202 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.165598 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.167011 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.168557 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.170407 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.170472 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qfb5h"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.171245 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.174757 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.174794 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-6226h"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.176732 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.176845 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.176959 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.183015 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-222p4"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.183154 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.183167 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-sppsj"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.185984 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nwldz"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.188795 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.189077 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.190580 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.192480 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.193931 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.195371 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2bwjb"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.196827 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.198817 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.200572 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7mdcj"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.201420 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.204110 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.205504 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.206962 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.208534 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.208675 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.210545 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.212786 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gtlzc"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.214153 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2nj65"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.215155 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.216482 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.217159 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.218422 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mzsmw"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.219855 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"] Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.219936 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.220970 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222621 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7mdcj"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222919 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222946 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw5v8\" (UniqueName: \"kubernetes.io/projected/b5dd7e68-1886-4112-9923-bc135c6d5302-kube-api-access-hw5v8\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222964 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a25b149-9f1f-4723-9c39-e6070685311c-metrics-tls\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222980 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v55jq\" (UniqueName: \"kubernetes.io/projected/147f1c2a-2e7c-4443-a428-1ac5222baf00-kube-api-access-v55jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.222994 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223007 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-config\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223022 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-images\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223044 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgrr5\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-kube-api-access-cgrr5\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223064 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/02fc1037-823e-4082-b228-eb5e00b98f3b-proxy-tls\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223077 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/147f1c2a-2e7c-4443-a428-1ac5222baf00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223091 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pr5d\" (UniqueName: \"kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223106 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223121 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223136 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/560c851e-0596-4c5f-9191-a7201149e335-proxy-tls\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223152 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-images\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223167 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223194 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-default-certificate\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223208 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223223 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223246 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223263 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mlp2\" (UniqueName: \"kubernetes.io/projected/a1b696bc-9f56-41ca-a537-532c2575e5d0-kube-api-access-5mlp2\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223278 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223301 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-config\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223314 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223329 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223344 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34ae1ed8-796d-4296-b003-aed0d5b82e52-metrics-tls\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223358 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223373 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-metrics-certs\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223388 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223403 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34ae1ed8-796d-4296-b003-aed0d5b82e52-trusted-ca\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223417 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqrd7\" (UniqueName: \"kubernetes.io/projected/560c851e-0596-4c5f-9191-a7201149e335-kube-api-access-tqrd7\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223433 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223452 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz5dz\" (UniqueName: \"kubernetes.io/projected/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-kube-api-access-rz5dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223473 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223497 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-stats-auth\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223512 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-trusted-ca\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223526 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223541 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223586 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1b696bc-9f56-41ca-a537-532c2575e5d0-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223600 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/147f1c2a-2e7c-4443-a428-1ac5222baf00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223615 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qjbq\" (UniqueName: \"kubernetes.io/projected/7da01014-205a-4c43-8640-653fd3b65c0a-kube-api-access-7qjbq\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223646 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c21d97e-1221-464d-ae54-56ea6e626e00-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223673 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk9cx\" (UniqueName: \"kubernetes.io/projected/9a25b149-9f1f-4723-9c39-e6070685311c-kube-api-access-qk9cx\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223689 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/02fc1037-823e-4082-b228-eb5e00b98f3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223704 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223720 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tsk7\" (UniqueName: \"kubernetes.io/projected/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-kube-api-access-5tsk7\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223734 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223749 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223764 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6s6d\" (UniqueName: \"kubernetes.io/projected/5d412a34-f608-4b2d-8485-197efa42d0f9-kube-api-access-w6s6d\") pod \"downloads-7954f5f757-2nj65\" (UID: \"5d412a34-f608-4b2d-8485-197efa42d0f9\") " pod="openshift-console/downloads-7954f5f757-2nj65"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223780 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-auth-proxy-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223794 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-service-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223808 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdwb6\" (UniqueName: \"kubernetes.io/projected/0c21d97e-1221-464d-ae54-56ea6e626e00-kube-api-access-qdwb6\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223825 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223840 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hd5s\" (UniqueName: \"kubernetes.io/projected/02fc1037-823e-4082-b228-eb5e00b98f3b-kube-api-access-4hd5s\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223855 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223870 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223887 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223901 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7n96\" (UniqueName: \"kubernetes.io/projected/6f227808-18ba-4538-aed5-d994ba07c2fb-kube-api-access-v7n96\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223915 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223928 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223942 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94f516db-b964-4e71-9fdf-8276800923ad-serving-cert\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223957 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-config\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223971 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-serving-cert\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.223986 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kql9\" (UniqueName: \"kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224000 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7da01014-205a-4c43-8640-653fd3b65c0a-service-ca-bundle\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224015 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9zgs\" (UniqueName: \"kubernetes.io/projected/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-kube-api-access-w9zgs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224030 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-serving-cert\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224044 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224059 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224075 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-serving-cert\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224081 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224089 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f227808-18ba-4538-aed5-d994ba07c2fb-machine-approver-tls\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224165 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224188 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224216 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnpwh\" (UniqueName: \"kubernetes.io/projected/94f516db-b964-4e71-9fdf-8276800923ad-kube-api-access-gnpwh\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224236 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224265 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-config\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224281 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr8fb\" (UniqueName: \"kubernetes.io/projected/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-kube-api-access-rr8fb\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224296 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224310 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx5sx\" (UniqueName: \"kubernetes.io/projected/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-kube-api-access-jx5sx\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224332 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5dd7e68-1886-4112-9923-bc135c6d5302-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.224345 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.225292 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.227149 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6226h"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.228202 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.228929 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qz8x2"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.231824 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.236467 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qfb5h"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.238077 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-nzndp"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.238788 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.239301 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mzsmw"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.240710 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.241857 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nkksl"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.243079 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.244077 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"]
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.249356 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.268829 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.288755 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.309121 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325265 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34ae1ed8-796d-4296-b003-aed0d5b82e52-metrics-tls\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325317 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325355 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-metrics-certs\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325388 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325420 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34ae1ed8-796d-4296-b003-aed0d5b82e52-trusted-ca\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325456 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqrd7\" (UniqueName: \"kubernetes.io/projected/560c851e-0596-4c5f-9191-a7201149e335-kube-api-access-tqrd7\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325488 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325541 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz5dz\" (UniqueName: \"kubernetes.io/projected/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-kube-api-access-rz5dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325573 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325603 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-stats-auth\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325680 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-trusted-ca\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325711 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325742 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325787 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1b696bc-9f56-41ca-a537-532c2575e5d0-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325818 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/147f1c2a-2e7c-4443-a428-1ac5222baf00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325849 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c21d97e-1221-464d-ae54-56ea6e626e00-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325883 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qjbq\" (UniqueName: \"kubernetes.io/projected/7da01014-205a-4c43-8640-653fd3b65c0a-kube-api-access-7qjbq\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325940 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk9cx\" (UniqueName: \"kubernetes.io/projected/9a25b149-9f1f-4723-9c39-e6070685311c-kube-api-access-qk9cx\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.325970 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/02fc1037-823e-4082-b228-eb5e00b98f3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326002 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tsk7\" (UniqueName: \"kubernetes.io/projected/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-kube-api-access-5tsk7\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326032 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326068 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6s6d\" (UniqueName: \"kubernetes.io/projected/5d412a34-f608-4b2d-8485-197efa42d0f9-kube-api-access-w6s6d\") pod \"downloads-7954f5f757-2nj65\" (UID: \"5d412a34-f608-4b2d-8485-197efa42d0f9\") " pod="openshift-console/downloads-7954f5f757-2nj65"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326101 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326131 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326168 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-auth-proxy-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326199 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-service-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326231 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdwb6\" (UniqueName: \"kubernetes.io/projected/0c21d97e-1221-464d-ae54-56ea6e626e00-kube-api-access-qdwb6\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326450 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326487 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hd5s\" (UniqueName: \"kubernetes.io/projected/02fc1037-823e-4082-b228-eb5e00b98f3b-kube-api-access-4hd5s\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326518 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326549 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326583 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326616 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7n96\" (UniqueName: \"kubernetes.io/projected/6f227808-18ba-4538-aed5-d994ba07c2fb-kube-api-access-v7n96\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326705 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326733 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94f516db-b964-4e71-9fdf-8276800923ad-serving-cert\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326768 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-config\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326787 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34ae1ed8-796d-4296-b003-aed0d5b82e52-trusted-ca\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326796 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-serving-cert\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326842 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-serving-cert\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326868 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kql9\" (UniqueName: \"kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326887 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7da01014-205a-4c43-8640-653fd3b65c0a-service-ca-bundle\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326902 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9zgs\" (UniqueName: \"kubernetes.io/projected/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-kube-api-access-w9zgs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326922 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326939 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326955 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f227808-18ba-4538-aed5-d994ba07c2fb-machine-approver-tls\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326971 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.326985 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-serving-cert\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327007 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327013 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327043 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnpwh\" (UniqueName: \"kubernetes.io/projected/94f516db-b964-4e71-9fdf-8276800923ad-kube-api-access-gnpwh\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327061 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327129 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327216 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327266 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx5sx\" (UniqueName: \"kubernetes.io/projected/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-kube-api-access-jx5sx\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327316 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-config\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327363 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr8fb\" (UniqueName: \"kubernetes.io/projected/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-kube-api-access-rr8fb\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327419 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327415 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5dd7e68-1886-4112-9923-bc135c6d5302-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327461 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327501 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327519 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-images\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327538 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw5v8\" (UniqueName: \"kubernetes.io/projected/b5dd7e68-1886-4112-9923-bc135c6d5302-kube-api-access-hw5v8\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327554 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a25b149-9f1f-4723-9c39-e6070685311c-metrics-tls\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327593 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v55jq\" (UniqueName: \"kubernetes.io/projected/147f1c2a-2e7c-4443-a428-1ac5222baf00-kube-api-access-v55jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327610 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327653 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-config\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327676 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgrr5\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-kube-api-access-cgrr5\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327693 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/02fc1037-823e-4082-b228-eb5e00b98f3b-proxy-tls\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327709 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/147f1c2a-2e7c-4443-a428-1ac5222baf00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327724 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pr5d\" (UniqueName: \"kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327740 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/560c851e-0596-4c5f-9191-a7201149e335-proxy-tls\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327777 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327795 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-images\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327811 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327829 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327844 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-default-certificate\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327860 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca\") pod \"controller-manager-879f6c89f-mqx4x\" (UID:
\"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327897 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mlp2\" (UniqueName: \"kubernetes.io/projected/a1b696bc-9f56-41ca-a537-532c2575e5d0-kube-api-access-5mlp2\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327913 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327940 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-config\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327957 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327974 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.328279 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-trusted-ca\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.328378 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.328960 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-service-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.327271 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.330256 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-serving-cert\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.330847 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-config\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.330944 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.331203 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.331869 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.332122 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.333467 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.333496 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.333750 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.334102 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.334529 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.334574 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.335013 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.335085 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94f516db-b964-4e71-9fdf-8276800923ad-config\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.335173 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-images\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.335827 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f227808-18ba-4538-aed5-d994ba07c2fb-auth-proxy-config\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.335989 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.336036 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34ae1ed8-796d-4296-b003-aed0d5b82e52-metrics-tls\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337055 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337419 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337466 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c21d97e-1221-464d-ae54-56ea6e626e00-config\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337750 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337739 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337836 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/02fc1037-823e-4082-b228-eb5e00b98f3b-proxy-tls\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.337931 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338044 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338295 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-serving-cert\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338408 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1b696bc-9f56-41ca-a537-532c2575e5d0-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338421 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338523 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.338791 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.339168 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.339212 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/02fc1037-823e-4082-b228-eb5e00b98f3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: 
\"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.339433 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-default-certificate\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.340253 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.340574 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.340841 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.341152 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.341217 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94f516db-b964-4e71-9fdf-8276800923ad-serving-cert\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.341757 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a25b149-9f1f-4723-9c39-e6070685311c-metrics-tls\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.341822 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.342209 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-stats-auth\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.342242 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f227808-18ba-4538-aed5-d994ba07c2fb-machine-approver-tls\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.345424 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c21d97e-1221-464d-ae54-56ea6e626e00-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.349799 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.358713 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7da01014-205a-4c43-8640-653fd3b65c0a-metrics-certs\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.369121 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.388688 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.392553 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7da01014-205a-4c43-8640-653fd3b65c0a-service-ca-bundle\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.409373 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.429947 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.449130 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.469732 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.489061 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.502503 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.510917 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.521385 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.529196 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.550121 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.557407 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/147f1c2a-2e7c-4443-a428-1ac5222baf00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.569530 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.589020 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.595934 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/147f1c2a-2e7c-4443-a428-1ac5222baf00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.609280 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.629813 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.649280 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.661853 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" 
(UniqueName: \"kubernetes.io/secret/b5dd7e68-1886-4112-9923-bc135c6d5302-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.670104 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.672355 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/560c851e-0596-4c5f-9191-a7201149e335-images\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.689201 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.710121 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.729582 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.736521 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/560c851e-0596-4c5f-9191-a7201149e335-proxy-tls\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.749355 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.769868 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.789705 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.794280 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-serving-cert\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.809564 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.811019 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-config\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.829516 4661 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.868160 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn6jx\" (UniqueName: \"kubernetes.io/projected/44d90b05-9404-455e-9bc4-17f416ed4b27-kube-api-access-gn6jx\") pod \"openshift-apiserver-operator-796bbdcf4f-cl7kj\" (UID: \"44d90b05-9404-455e-9bc4-17f416ed4b27\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.893614 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kcdc\" (UniqueName: \"kubernetes.io/projected/021ef5bd-da08-44a2-8cf9-47cae92d4c47-kube-api-access-9kcdc\") pod \"apiserver-7bbb656c7d-kh62k\" (UID: \"021ef5bd-da08-44a2-8cf9-47cae92d4c47\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.907056 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.928337 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgb5k\" (UniqueName: \"kubernetes.io/projected/7824f777-df2b-46ba-a9ea-6a428351d121-kube-api-access-cgb5k\") pod \"apiserver-76f77b778f-gtlzc\" (UID: \"7824f777-df2b-46ba-a9ea-6a428351d121\") " pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.930050 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.948462 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.970276 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 01 05:31:31 crc kubenswrapper[4661]: I1001 05:31:31.989433 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.009309 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.029825 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.049395 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.069432 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.089153 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.109687 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: 
I1001 05:31:32.127616 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"] Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.129710 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.148948 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: W1001 05:31:32.150286 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod021ef5bd_da08_44a2_8cf9_47cae92d4c47.slice/crio-82d566fa297c2d12bc7cffe86658efa095bf24b0f651565c711e53c35938821f WatchSource:0}: Error finding container 82d566fa297c2d12bc7cffe86658efa095bf24b0f651565c711e53c35938821f: Status 404 returned error can't find the container with id 82d566fa297c2d12bc7cffe86658efa095bf24b0f651565c711e53c35938821f Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.158656 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj"] Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.167132 4661 request.go:700] Waited for 1.00782756s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-metrics&limit=500&resourceVersion=0 Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.168806 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.189426 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.208615 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.220002 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.229094 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.254109 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.269624 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.289035 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.311673 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.329777 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.349670 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.369830 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.390290 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.409866 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.412665 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gtlzc"] Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.429488 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: W1001 05:31:32.432172 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7824f777_df2b_46ba_a9ea_6a428351d121.slice/crio-d3dedad909c5178c6d6c62a364f98625bfc63443f04db474809b5e85475e5a6d WatchSource:0}: Error finding container d3dedad909c5178c6d6c62a364f98625bfc63443f04db474809b5e85475e5a6d: Status 404 returned error can't find the container with id d3dedad909c5178c6d6c62a364f98625bfc63443f04db474809b5e85475e5a6d Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.449252 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.471163 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.489416 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.509373 4661 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.529749 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.535777 4661 generic.go:334] "Generic (PLEG): container finished" podID="021ef5bd-da08-44a2-8cf9-47cae92d4c47" containerID="f7d0337b5b0071c9943b7df4980f737b5d7b7ac3594ed8e46550fa85e0383c2a" exitCode=0 Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.535879 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" event={"ID":"021ef5bd-da08-44a2-8cf9-47cae92d4c47","Type":"ContainerDied","Data":"f7d0337b5b0071c9943b7df4980f737b5d7b7ac3594ed8e46550fa85e0383c2a"} Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.535922 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" event={"ID":"021ef5bd-da08-44a2-8cf9-47cae92d4c47","Type":"ContainerStarted","Data":"82d566fa297c2d12bc7cffe86658efa095bf24b0f651565c711e53c35938821f"} Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.538071 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" event={"ID":"44d90b05-9404-455e-9bc4-17f416ed4b27","Type":"ContainerStarted","Data":"a264c76cfe1cd01e95a652e67c920d157b35c97ce3ae3245cefd359c0b278301"} Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.538097 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" event={"ID":"44d90b05-9404-455e-9bc4-17f416ed4b27","Type":"ContainerStarted","Data":"3d902b03454c06ba81dfede838a7ab29c742fec300b44f85cf4c82941313e293"} Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.541145 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" event={"ID":"7824f777-df2b-46ba-a9ea-6a428351d121","Type":"ContainerStarted","Data":"d3dedad909c5178c6d6c62a364f98625bfc63443f04db474809b5e85475e5a6d"} Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.550739 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.568569 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.589311 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.609146 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.629434 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.649336 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.669984 4661 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.690248 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.710351 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.732005 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.750659 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.771858 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.789835 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.814865 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.828881 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.849102 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.868929 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.889296 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.909808 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.930310 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.951106 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.969904 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 01 05:31:32 crc kubenswrapper[4661]: I1001 05:31:32.988328 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.008795 4661 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.030599 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.048986 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.069527 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.089834 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.133186 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.157097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz5dz\" (UniqueName: \"kubernetes.io/projected/8c150ea9-cd3d-4dab-9701-e7bfef917b0e-kube-api-access-rz5dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-md22d\" (UID: \"8c150ea9-cd3d-4dab-9701-e7bfef917b0e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.168137 4661 request.go:700] Waited for 1.838449488s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/serviceaccounts/machine-api-operator/token
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.178389 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq\") pod \"route-controller-manager-6576b87f9c-dbmt2\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.197150 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdwb6\" (UniqueName: \"kubernetes.io/projected/0c21d97e-1221-464d-ae54-56ea6e626e00-kube-api-access-qdwb6\") pod \"machine-api-operator-5694c8668f-sppsj\" (UID: \"0c21d97e-1221-464d-ae54-56ea6e626e00\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.208763 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7n96\" (UniqueName: \"kubernetes.io/projected/6f227808-18ba-4538-aed5-d994ba07c2fb-kube-api-access-v7n96\") pod \"machine-approver-56656f9798-c4hjg\" (UID: \"6f227808-18ba-4538-aed5-d994ba07c2fb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.227038 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqrd7\" (UniqueName: \"kubernetes.io/projected/560c851e-0596-4c5f-9191-a7201149e335-kube-api-access-tqrd7\") pod \"machine-config-operator-74547568cd-x28q5\" (UID: \"560c851e-0596-4c5f-9191-a7201149e335\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.254245 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr8fb\" (UniqueName: \"kubernetes.io/projected/3b8c7190-1906-4f8f-b93a-85ad0b277e8f-kube-api-access-rr8fb\") pod \"service-ca-operator-777779d784-vtztn\" (UID: \"3b8c7190-1906-4f8f-b93a-85ad0b277e8f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.274751 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgrr5\" (UniqueName: \"kubernetes.io/projected/34ae1ed8-796d-4296-b003-aed0d5b82e52-kube-api-access-cgrr5\") pod \"ingress-operator-5b745b69d9-zcpdn\" (UID: \"34ae1ed8-796d-4296-b003-aed0d5b82e52\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.286284 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.289809 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw5v8\" (UniqueName: \"kubernetes.io/projected/b5dd7e68-1886-4112-9923-bc135c6d5302-kube-api-access-hw5v8\") pod \"multus-admission-controller-857f4d67dd-nkksl\" (UID: \"b5dd7e68-1886-4112-9923-bc135c6d5302\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.296711 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.310358 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx5sx\" (UniqueName: \"kubernetes.io/projected/1d7a9657-cc3a-4c17-9312-9c7242ca9e95-kube-api-access-jx5sx\") pod \"openshift-config-operator-7777fb866f-tnf7n\" (UID: \"1d7a9657-cc3a-4c17-9312-9c7242ca9e95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.327543 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.342345 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pr5d\" (UniqueName: \"kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d\") pod \"controller-manager-879f6c89f-mqx4x\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.348352 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.355464 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnpwh\" (UniqueName: \"kubernetes.io/projected/94f516db-b964-4e71-9fdf-8276800923ad-kube-api-access-gnpwh\") pod \"console-operator-58897d9998-nwldz\" (UID: \"94f516db-b964-4e71-9fdf-8276800923ad\") " pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.384212 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kql9\" (UniqueName: \"kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9\") pod \"console-f9d7485db-bnbps\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.387033 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.401841 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.402808 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9zgs\" (UniqueName: \"kubernetes.io/projected/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-kube-api-access-w9zgs\") pod \"oauth-openshift-558db77b4-j9mhf\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") " pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.410026 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.415216 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v55jq\" (UniqueName: \"kubernetes.io/projected/147f1c2a-2e7c-4443-a428-1ac5222baf00-kube-api-access-v55jq\") pod \"openshift-controller-manager-operator-756b6f6bc6-plvzs\" (UID: \"147f1c2a-2e7c-4443-a428-1ac5222baf00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.417937 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.428326 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hd5s\" (UniqueName: \"kubernetes.io/projected/02fc1037-823e-4082-b228-eb5e00b98f3b-kube-api-access-4hd5s\") pod \"machine-config-controller-84d6567774-x4m4n\" (UID: \"02fc1037-823e-4082-b228-eb5e00b98f3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.447976 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.470062 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6s6d\" (UniqueName: \"kubernetes.io/projected/5d412a34-f608-4b2d-8485-197efa42d0f9-kube-api-access-w6s6d\") pod \"downloads-7954f5f757-2nj65\" (UID: \"5d412a34-f608-4b2d-8485-197efa42d0f9\") " pod="openshift-console/downloads-7954f5f757-2nj65"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.474000 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mlp2\" (UniqueName: \"kubernetes.io/projected/a1b696bc-9f56-41ca-a537-532c2575e5d0-kube-api-access-5mlp2\") pod \"cluster-samples-operator-665b6dd947-ctkvv\" (UID: \"a1b696bc-9f56-41ca-a537-532c2575e5d0\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.480120 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.496552 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk9cx\" (UniqueName: \"kubernetes.io/projected/9a25b149-9f1f-4723-9c39-e6070685311c-kube-api-access-qk9cx\") pod \"dns-operator-744455d44c-2bwjb\" (UID: \"9a25b149-9f1f-4723-9c39-e6070685311c\") " pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.505949 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qjbq\" (UniqueName: \"kubernetes.io/projected/7da01014-205a-4c43-8640-653fd3b65c0a-kube-api-access-7qjbq\") pod \"router-default-5444994796-shn64\" (UID: \"7da01014-205a-4c43-8640-653fd3b65c0a\") " pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.521920 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.528482 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tsk7\" (UniqueName: \"kubernetes.io/projected/fc570c7d-b32c-4d0b-ba06-73b13143cd0c-kube-api-access-5tsk7\") pod \"authentication-operator-69f744f599-222p4\" (UID: \"fc570c7d-b32c-4d0b-ba06-73b13143cd0c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.561395 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"]
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565024 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565080 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565104 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565119 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565136 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5rkl\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565153 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565191 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjqs5\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-kube-api-access-sjqs5\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565220 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565277 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565326 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.565433 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.566007 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.06599439 +0000 UTC m=+143.003973004 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.566258 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.593848 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-sppsj"]
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.607107 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"
Oct 01 05:31:33 crc kubenswrapper[4661]: W1001 05:31:33.609906 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50a192ea_ce64_4d8c_b3e3_a19ef658aa2b.slice/crio-c8a9c7be568c3dab9bc9bbbf8e74b5f147d711e01b1ae3c06cf499fddebf9a1f WatchSource:0}: Error finding container c8a9c7be568c3dab9bc9bbbf8e74b5f147d711e01b1ae3c06cf499fddebf9a1f: Status 404 returned error can't find the container with id c8a9c7be568c3dab9bc9bbbf8e74b5f147d711e01b1ae3c06cf499fddebf9a1f
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.613160 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bnbps"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.620319 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.646308 4661 generic.go:334] "Generic (PLEG): container finished" podID="7824f777-df2b-46ba-a9ea-6a428351d121" containerID="aac56e45908bca2926d96a4c5cf85eb65d7ba2a2cec29c95b69ad4b2ba3a88d0" exitCode=0
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.647134 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" event={"ID":"7824f777-df2b-46ba-a9ea-6a428351d121","Type":"ContainerDied","Data":"aac56e45908bca2926d96a4c5cf85eb65d7ba2a2cec29c95b69ad4b2ba3a88d0"}
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.658989 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"]
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.659316 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.659983 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667091 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667350 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667381 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4b45a54-4eb0-4a00-8ba2-bde133539b24-metrics-tls\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667409 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djzrj\" (UniqueName: \"kubernetes.io/projected/b3b2fb02-1431-4fd3-83e9-37770bb57825-kube-api-access-djzrj\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667436 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qxmb\" (UniqueName: \"kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667452 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-webhook-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667509 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k6j9\" (UniqueName: \"kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667526 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-srv-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667557 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667590 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667605 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-cert\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667674 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667734 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0dc486f-e096-48b2-9708-dd01ff18cfef-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667771 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmwqq\" (UniqueName: \"kubernetes.io/projected/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-kube-api-access-hmwqq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667807 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-plugins-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667827 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-serving-cert\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667847 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/276ccab1-820f-4a4f-81b2-fdca6aa59628-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667865 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrvlf\" (UniqueName: \"kubernetes.io/projected/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-kube-api-access-qrvlf\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.667969 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clr7k\" (UniqueName: \"kubernetes.io/projected/53107074-138a-4df9-ab42-9e09b5a257d3-kube-api-access-clr7k\") pod \"migrator-59844c95c7-p6wgf\" (UID: \"53107074-138a-4df9-ab42-9e09b5a257d3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668003 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668024 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-service-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668067 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-mountpoint-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668086 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/276ccab1-820f-4a4f-81b2-fdca6aa59628-config\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668135 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-tmpfs\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668168 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-certs\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668190 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668209 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slxxb\" (UniqueName: \"kubernetes.io/projected/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-kube-api-access-slxxb\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668226 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4b45a54-4eb0-4a00-8ba2-bde133539b24-config-volume\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668269 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr6fv\" (UniqueName: \"kubernetes.io/projected/653d6195-02ef-4ded-a397-9dd414f5a66e-kube-api-access-jr6fv\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668299 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-registration-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668317 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-apiservice-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668385 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-node-bootstrap-token\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668417 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668451 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nllk4\" (UniqueName: \"kubernetes.io/projected/f4b45a54-4eb0-4a00-8ba2-bde133539b24-kube-api-access-nllk4\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668499 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668543 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a988c7a-aae7-4f17-bdb0-8395fd40008d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668589 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb45g\" (UniqueName: \"kubernetes.io/projected/f1f59b90-125d-4668-ba39-27c606b07de0-kube-api-access-wb45g\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668610 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5lkw\" (UniqueName: \"kubernetes.io/projected/d5278166-ab19-4464-87d6-3fc6fe335855-kube-api-access-q5lkw\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668660 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-cabundle\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.668680 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"
Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.670007 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.169988093 +0000 UTC m=+143.107966707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.676906 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" event={"ID":"021ef5bd-da08-44a2-8cf9-47cae92d4c47","Type":"ContainerStarted","Data":"0bc309d17f3d3f2c5c6175f27e8bfd8255aba5515dfb881a776a20d88f7fc6e7"}
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.677862 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.678888 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.679351 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.682387 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2nj65"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.682973 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683029 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683098 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683335 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-srv-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683370 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683464 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683509 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a988c7a-aae7-4f17-bdb0-8395fd40008d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.683553 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.684351 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.684411 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.684468 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5rkl\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.684497 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-socket-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.684687 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686817 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-key\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686839 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686856 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlprf\" (UniqueName: \"kubernetes.io/projected/b0dc486f-e096-48b2-9708-dd01ff18cfef-kube-api-access-jlprf\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686873 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-config\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686912 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a988c7a-aae7-4f17-bdb0-8395fd40008d-config\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686959 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/276ccab1-820f-4a4f-81b2-fdca6aa59628-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.686977 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdtzc\" (UniqueName: \"kubernetes.io/projected/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-kube-api-access-gdtzc\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.687008 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjqs5\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-kube-api-access-sjqs5\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.687042 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-csi-data-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.687062 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89tsw\" (UniqueName: \"kubernetes.io/projected/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-kube-api-access-89tsw\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.687082 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.687104 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-client\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.688153 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.689515 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.691946 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.692346 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn"]
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.694745 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.695876 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.710429 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d"]
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.713278 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" event={"ID":"6f227808-18ba-4538-aed5-d994ba07c2fb","Type":"ContainerStarted","Data":"b880ffce700c812794a6547210827cc783f3d5e0744f732eabbc2bd0e1b9194d"}
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.716391 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.743027 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.765527 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5rkl\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:33 crc kubenswrapper[4661]: W1001 05:31:33.783383 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c150ea9_cd3d_4dab_9701_e7bfef917b0e.slice/crio-fd8b239a2a2161e0810dacb7c7e8d89cef53a8ffc0abdff7fc9f11344049ccda WatchSource:0}: Error finding container fd8b239a2a2161e0810dacb7c7e8d89cef53a8ffc0abdff7fc9f11344049ccda: Status 404 returned error can't find the container with id fd8b239a2a2161e0810dacb7c7e8d89cef53a8ffc0abdff7fc9f11344049ccda
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787523 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjqs5\" (UniqueName: \"kubernetes.io/projected/0e2a7348-cb1f-4ad4-8e45-82e8386569a9-kube-api-access-sjqs5\") pod \"cluster-image-registry-operator-dc59b4c8b-rm9mt\" (UID: \"0e2a7348-cb1f-4ad4-8e45-82e8386569a9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787708 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787742 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-node-bootstrap-token\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787772 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nllk4\" (UniqueName: \"kubernetes.io/projected/f4b45a54-4eb0-4a00-8ba2-bde133539b24-kube-api-access-nllk4\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787802 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a988c7a-aae7-4f17-bdb0-8395fd40008d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787826 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb45g\" (UniqueName: \"kubernetes.io/projected/f1f59b90-125d-4668-ba39-27c606b07de0-kube-api-access-wb45g\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787845 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5lkw\" (UniqueName: \"kubernetes.io/projected/d5278166-ab19-4464-87d6-3fc6fe335855-kube-api-access-q5lkw\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787866 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-cabundle\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787888 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787918 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787949 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787968 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-srv-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.787982 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788003 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788024 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a988c7a-aae7-4f17-bdb0-8395fd40008d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788046 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-socket-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788077 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlprf\" (UniqueName: \"kubernetes.io/projected/b0dc486f-e096-48b2-9708-dd01ff18cfef-kube-api-access-jlprf\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788095 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-config\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-key\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788130 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788163 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a988c7a-aae7-4f17-bdb0-8395fd40008d-config\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788182 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/276ccab1-820f-4a4f-81b2-fdca6aa59628-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtzc\" (UniqueName: \"kubernetes.io/projected/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-kube-api-access-gdtzc\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788226 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89tsw\" (UniqueName: \"kubernetes.io/projected/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-kube-api-access-89tsw\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788241 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788258 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-client\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788279 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-csi-data-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw"
Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788295 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName:
\"kubernetes.io/secret/f4b45a54-4eb0-4a00-8ba2-bde133539b24-metrics-tls\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788337 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djzrj\" (UniqueName: \"kubernetes.io/projected/b3b2fb02-1431-4fd3-83e9-37770bb57825-kube-api-access-djzrj\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788353 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qxmb\" (UniqueName: \"kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788370 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-webhook-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788399 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k6j9\" (UniqueName: \"kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788424 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-srv-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788440 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788473 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-cert\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788525 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0dc486f-e096-48b2-9708-dd01ff18cfef-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 
01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788542 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmwqq\" (UniqueName: \"kubernetes.io/projected/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-kube-api-access-hmwqq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788559 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-plugins-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788583 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-serving-cert\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788598 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/276ccab1-820f-4a4f-81b2-fdca6aa59628-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788613 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrvlf\" (UniqueName: \"kubernetes.io/projected/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-kube-api-access-qrvlf\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788668 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clr7k\" (UniqueName: \"kubernetes.io/projected/53107074-138a-4df9-ab42-9e09b5a257d3-kube-api-access-clr7k\") pod \"migrator-59844c95c7-p6wgf\" (UID: \"53107074-138a-4df9-ab42-9e09b5a257d3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788684 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-service-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788701 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788725 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" 
(UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-mountpoint-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788738 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/276ccab1-820f-4a4f-81b2-fdca6aa59628-config\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788757 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788773 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-tmpfs\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788788 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-certs\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788803 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slxxb\" (UniqueName: \"kubernetes.io/projected/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-kube-api-access-slxxb\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788817 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4b45a54-4eb0-4a00-8ba2-bde133539b24-config-volume\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788832 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788848 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr6fv\" (UniqueName: \"kubernetes.io/projected/653d6195-02ef-4ded-a397-9dd414f5a66e-kube-api-access-jr6fv\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:33 crc 
kubenswrapper[4661]: I1001 05:31:33.788861 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-registration-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.788876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-apiservice-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.790433 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.794778 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-serving-cert\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.795710 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-apiservice-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.796577 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.798249 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-webhook-cert\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.798267 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/276ccab1-820f-4a4f-81b2-fdca6aa59628-config\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.798417 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-node-bootstrap-token\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.798517 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.798949 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-socket-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.799395 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.299379385 +0000 UTC m=+143.237358099 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.799420 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/276ccab1-820f-4a4f-81b2-fdca6aa59628-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.800091 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-tmpfs\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.800241 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4b45a54-4eb0-4a00-8ba2-bde133539b24-config-volume\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.801091 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-cert\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.803238 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-certs\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.803365 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.803430 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-registration-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.804128 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.804887 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.805035 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.805832 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0dc486f-e096-48b2-9708-dd01ff18cfef-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.806352 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-cabundle\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.806814 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-plugins-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.807157 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-mountpoint-dir\") pod \"csi-hostpathplugin-mzsmw\" 
(UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.808550 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/653d6195-02ef-4ded-a397-9dd414f5a66e-srv-cert\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.809326 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d5278166-ab19-4464-87d6-3fc6fe335855-csi-data-dir\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.809839 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.809898 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a988c7a-aae7-4f17-bdb0-8395fd40008d-config\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.810354 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4b45a54-4eb0-4a00-8ba2-bde133539b24-metrics-tls\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.813619 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.814221 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-service-ca\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.814502 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.814820 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f1f59b90-125d-4668-ba39-27c606b07de0-etcd-client\") pod 
\"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.815235 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a988c7a-aae7-4f17-bdb0-8395fd40008d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.815655 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b3b2fb02-1431-4fd3-83e9-37770bb57825-signing-key\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.863857 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qxmb\" (UniqueName: \"kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb\") pod \"marketplace-operator-79b997595-r5ghr\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.889405 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.889574 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k6j9\" (UniqueName: \"kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9\") pod \"collect-profiles-29321610-wjxrz\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.889669 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.389655336 +0000 UTC m=+143.327633950 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.889773 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.890025 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.390018625 +0000 UTC m=+143.327997239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.908183 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrvlf\" (UniqueName: \"kubernetes.io/projected/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-kube-api-access-qrvlf\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.924331 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a988c7a-aae7-4f17-bdb0-8395fd40008d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vpzn9\" (UID: \"6a988c7a-aae7-4f17-bdb0-8395fd40008d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.947914 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clr7k\" (UniqueName: \"kubernetes.io/projected/53107074-138a-4df9-ab42-9e09b5a257d3-kube-api-access-clr7k\") pod \"migrator-59844c95c7-p6wgf\" (UID: \"53107074-138a-4df9-ab42-9e09b5a257d3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.963376 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlprf\" (UniqueName: \"kubernetes.io/projected/b0dc486f-e096-48b2-9708-dd01ff18cfef-kube-api-access-jlprf\") pod \"package-server-manager-789f6589d5-5mmdv\" (UID: \"b0dc486f-e096-48b2-9708-dd01ff18cfef\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.971797 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-srv-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.973475 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ee2a5e15-cecf-470a-9f59-acef3b3e87a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-zh9kb\" (UID: \"ee2a5e15-cecf-470a-9f59-acef3b3e87a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.974055 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.974975 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djzrj\" (UniqueName: \"kubernetes.io/projected/b3b2fb02-1431-4fd3-83e9-37770bb57825-kube-api-access-djzrj\") pod \"service-ca-9c57cc56f-qz8x2\" (UID: \"b3b2fb02-1431-4fd3-83e9-37770bb57825\") " pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.984734 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt" Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.991437 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.991602 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.491581832 +0000 UTC m=+143.429560446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.991791 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:33 crc kubenswrapper[4661]: E1001 05:31:33.992128 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-01 05:31:34.492114518 +0000 UTC m=+143.430093132 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:33 crc kubenswrapper[4661]: I1001 05:31:33.992804 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vtztn"] Oct 01 05:31:33 crc kubenswrapper[4661]: W1001 05:31:33.993160 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5731cb34_0698_48be_a662_79dd89e808b2.slice/crio-85e1aa15c7522ddc2c516c7096cccb155410f5ae72b8d716d33f2d315fe5bdda WatchSource:0}: Error finding container 85e1aa15c7522ddc2c516c7096cccb155410f5ae72b8d716d33f2d315fe5bdda: Status 404 returned error can't find the container with id 85e1aa15c7522ddc2c516c7096cccb155410f5ae72b8d716d33f2d315fe5bdda Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.003881 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb45g\" (UniqueName: \"kubernetes.io/projected/f1f59b90-125d-4668-ba39-27c606b07de0-kube-api-access-wb45g\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.025309 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.030147 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nllk4\" (UniqueName: \"kubernetes.io/projected/f4b45a54-4eb0-4a00-8ba2-bde133539b24-kube-api-access-nllk4\") pod \"dns-default-7mdcj\" (UID: \"f4b45a54-4eb0-4a00-8ba2-bde133539b24\") " pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.054894 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5lkw\" (UniqueName: \"kubernetes.io/projected/d5278166-ab19-4464-87d6-3fc6fe335855-kube-api-access-q5lkw\") pod \"csi-hostpathplugin-mzsmw\" (UID: \"d5278166-ab19-4464-87d6-3fc6fe335855\") " pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.055716 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nkksl"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.063585 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.068506 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod591acb44_aa58_4103_98f7_b68e067bc90d.slice/crio-a33b4c263a20f7a921d44ec94309d1461c0200aac833c6768045602466f890d8 WatchSource:0}: Error finding container a33b4c263a20f7a921d44ec94309d1461c0200aac833c6768045602466f890d8: Status 404 returned error can't find the container with id a33b4c263a20f7a921d44ec94309d1461c0200aac833c6768045602466f890d8 Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.072934 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.076131 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmwqq\" (UniqueName: \"kubernetes.io/projected/61ab52df-4ef3-4f24-a8f3-01c7fef8c99f-kube-api-access-hmwqq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kqlm6\" (UID: \"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.076247 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.080428 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.080819 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.087127 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.090579 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2nj65"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.091150 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr6fv\" (UniqueName: \"kubernetes.io/projected/653d6195-02ef-4ded-a397-9dd414f5a66e-kube-api-access-jr6fv\") pod \"olm-operator-6b444d44fb-7ldmq\" (UID: \"653d6195-02ef-4ded-a397-9dd414f5a66e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.093452 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.093595 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.593579382 +0000 UTC m=+143.531557996 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.093894 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.094178 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.594170508 +0000 UTC m=+143.532149122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.094511 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.099255 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1f59b90-125d-4668-ba39-27c606b07de0-config\") pod \"etcd-operator-b45778765-qfb5h\" (UID: \"f1f59b90-125d-4668-ba39-27c606b07de0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.103236 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slxxb\" (UniqueName: \"kubernetes.io/projected/27b2dcf3-b40d-4c3d-a9a9-b68c77b96669-kube-api-access-slxxb\") pod \"ingress-canary-6226h\" (UID: \"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669\") " pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.103309 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7c80f42-b230-4eb5-a285-b6e4a60a34ad-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ngdlf\" (UID: \"d7c80f42-b230-4eb5-a285-b6e4a60a34ad\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.109563 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.115705 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.122041 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/276ccab1-820f-4a4f-81b2-fdca6aa59628-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c5cbp\" (UID: \"276ccab1-820f-4a4f-81b2-fdca6aa59628\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.123296 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.129807 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.144813 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.153803 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6226h" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.161422 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.168037 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdtzc\" (UniqueName: \"kubernetes.io/projected/ed4a1d37-47d2-4949-8841-4ab7f929e6fa-kube-api-access-gdtzc\") pod \"machine-config-server-nzndp\" (UID: \"ed4a1d37-47d2-4949-8841-4ab7f929e6fa\") " pod="openshift-machine-config-operator/machine-config-server-nzndp" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.184300 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.190674 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nzndp" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.195311 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.195573 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.695545219 +0000 UTC m=+143.633523823 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.195846 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.196281 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.69627105 +0000 UTC m=+143.634249664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.248306 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89tsw\" (UniqueName: \"kubernetes.io/projected/a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90-kube-api-access-89tsw\") pod \"packageserver-d55dfcdfc-knw9f\" (UID: \"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.257193 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2bwjb"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.271286 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-222p4"] Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.297380 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.297549 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.797528179 +0000 UTC m=+143.735506793 (durationBeforeRetry 500ms). 
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.297682 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.298110 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.798103454 +0000 UTC m=+143.736082068 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.309065 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.309111 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.335888 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nwldz"]
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.398683 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.398764 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.898736136 +0000 UTC m=+143.836714760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.399017 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.399397 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:34.899382603 +0000 UTC m=+143.837361237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.402704 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"
Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.425755 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod560c851e_0596_4c5f_9191_a7201149e335.slice/crio-b1dfe960b65d1668d0183e7138704fbf7e3a41d5bb69dbb7d18d2468d6973cc7 WatchSource:0}: Error finding container b1dfe960b65d1668d0183e7138704fbf7e3a41d5bb69dbb7d18d2468d6973cc7: Status 404 returned error can't find the container with id b1dfe960b65d1668d0183e7138704fbf7e3a41d5bb69dbb7d18d2468d6973cc7
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.437853 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.442284 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d412a34_f608_4b2d_8485_197efa42d0f9.slice/crio-38bbc06e481398400ec9f11161eceb474bfe53998f46cb3cd76feca47722583c WatchSource:0}: Error finding container 38bbc06e481398400ec9f11161eceb474bfe53998f46cb3cd76feca47722583c: Status 404 returned error can't find the container with id 38bbc06e481398400ec9f11161eceb474bfe53998f46cb3cd76feca47722583c Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.445790 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a25b149_9f1f_4723_9c39_e6070685311c.slice/crio-7c6b6bd8474addadc3031741aeff46a81cc3e162b1ac563c50ddfa8f14842d1f WatchSource:0}: Error finding container 7c6b6bd8474addadc3031741aeff46a81cc3e162b1ac563c50ddfa8f14842d1f: Status 404 returned error can't find the container with id 7c6b6bd8474addadc3031741aeff46a81cc3e162b1ac563c50ddfa8f14842d1f Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.462120 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94f516db_b964_4e71_9fdf_8276800923ad.slice/crio-96b64b3a3924f400f703da6be1df305e5c82cd28a2062594545fc6795bffcc01 WatchSource:0}: Error finding container 96b64b3a3924f400f703da6be1df305e5c82cd28a2062594545fc6795bffcc01: Status 404 returned error can't find the container with id 96b64b3a3924f400f703da6be1df305e5c82cd28a2062594545fc6795bffcc01 Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.462948 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc570c7d_b32c_4d0b_ba06_73b13143cd0c.slice/crio-0963b2432a173c480a8864d845be0c141ffff6cee1389fbb8b38cdf0903e2a1d WatchSource:0}: Error finding container 0963b2432a173c480a8864d845be0c141ffff6cee1389fbb8b38cdf0903e2a1d: Status 404 returned error can't find the container with id 0963b2432a173c480a8864d845be0c141ffff6cee1389fbb8b38cdf0903e2a1d Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.499651 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.500243 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.00019858 +0000 UTC m=+143.938177194 (durationBeforeRetry 500ms). 
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.500373 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.500735 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.000717704 +0000 UTC m=+143.938696318 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.601398 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.601516 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.10149547 +0000 UTC m=+144.039474084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.601759 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.602020 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.102010744 +0000 UTC m=+144.039989358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.666534 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt"]
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.669049 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs"]
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.705518 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.705734 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.205709729 +0000 UTC m=+144.143688343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.705832 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.706301 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.206287905 +0000 UTC m=+144.144266519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.721587 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" event={"ID":"9a25b149-9f1f-4723-9c39-e6070685311c","Type":"ContainerStarted","Data":"7c6b6bd8474addadc3031741aeff46a81cc3e162b1ac563c50ddfa8f14842d1f"}
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.726614 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nwldz" event={"ID":"94f516db-b964-4e71-9fdf-8276800923ad","Type":"ContainerStarted","Data":"96b64b3a3924f400f703da6be1df305e5c82cd28a2062594545fc6795bffcc01"}
Oct 01 05:31:34 crc kubenswrapper[4661]: W1001 05:31:34.727015 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod147f1c2a_2e7c_4443_a428_1ac5222baf00.slice/crio-99bc98ef13e3b289783a1b995e4a568fc8efb297888239400a023bf2a2bcf753 WatchSource:0}: Error finding container 99bc98ef13e3b289783a1b995e4a568fc8efb297888239400a023bf2a2bcf753: Status 404 returned error can't find the container with id 99bc98ef13e3b289783a1b995e4a568fc8efb297888239400a023bf2a2bcf753
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.727613 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" event={"ID":"3b8c7190-1906-4f8f-b93a-85ad0b277e8f","Type":"ContainerStarted","Data":"e7d0b63742efd34c63bca63f30d425186ab9e61c6ed192eeb2e88069b21e7fbf"}
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.728956 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" event={"ID":"8c150ea9-cd3d-4dab-9701-e7bfef917b0e","Type":"ContainerStarted","Data":"fd8b239a2a2161e0810dacb7c7e8d89cef53a8ffc0abdff7fc9f11344049ccda"}
event={"ID":"8c150ea9-cd3d-4dab-9701-e7bfef917b0e","Type":"ContainerStarted","Data":"fd8b239a2a2161e0810dacb7c7e8d89cef53a8ffc0abdff7fc9f11344049ccda"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.730076 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" event={"ID":"34ae1ed8-796d-4296-b003-aed0d5b82e52","Type":"ContainerStarted","Data":"5cd5cb012dd1ed14582fc67a0220b0ac2a78d290c6111230f1a6eeee432b0431"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.730870 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" event={"ID":"0c21d97e-1221-464d-ae54-56ea6e626e00","Type":"ContainerStarted","Data":"a0f62614caa9149a490f77e9bcf653c705506164e8388b681888f914f7a469b4"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.731939 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-shn64" event={"ID":"7da01014-205a-4c43-8640-653fd3b65c0a","Type":"ContainerStarted","Data":"b4480099e4d9ab71ad796e07a2572fb37235dc198bd3189f7fa133566bb7dd1d"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.733211 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" event={"ID":"5731cb34-0698-48be-a662-79dd89e808b2","Type":"ContainerStarted","Data":"85e1aa15c7522ddc2c516c7096cccb155410f5ae72b8d716d33f2d315fe5bdda"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.733978 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" event={"ID":"fc570c7d-b32c-4d0b-ba06-73b13143cd0c","Type":"ContainerStarted","Data":"0963b2432a173c480a8864d845be0c141ffff6cee1389fbb8b38cdf0903e2a1d"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.735204 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" event={"ID":"b5dd7e68-1886-4112-9923-bc135c6d5302","Type":"ContainerStarted","Data":"24e8daef574a43efc610088e501b72915be6d44094993e96e3def86b0fbfbef5"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.736223 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt" event={"ID":"0e2a7348-cb1f-4ad4-8e45-82e8386569a9","Type":"ContainerStarted","Data":"3f133b1f13cde140d0008cc19adb9f549270727757673df2850e04f243b08457"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.737289 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bnbps" event={"ID":"591acb44-aa58-4103-98f7-b68e067bc90d","Type":"ContainerStarted","Data":"a33b4c263a20f7a921d44ec94309d1461c0200aac833c6768045602466f890d8"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.738184 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" event={"ID":"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7","Type":"ContainerStarted","Data":"979d010e5c74d6c95143ab3feb6437a85863df434467a891bd61535683e27798"} Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.738994 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" event={"ID":"560c851e-0596-4c5f-9191-a7201149e335","Type":"ContainerStarted","Data":"b1dfe960b65d1668d0183e7138704fbf7e3a41d5bb69dbb7d18d2468d6973cc7"} Oct 01 05:31:34 crc 
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.740661 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2nj65" event={"ID":"5d412a34-f608-4b2d-8485-197efa42d0f9","Type":"ContainerStarted","Data":"38bbc06e481398400ec9f11161eceb474bfe53998f46cb3cd76feca47722583c"}
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.741525 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" event={"ID":"1d7a9657-cc3a-4c17-9312-9c7242ca9e95","Type":"ContainerStarted","Data":"b97b83e496386b8c021377519c217ba00e6192f8e1a83183ed43877656407230"}
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.758890 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cl7kj" podStartSLOduration=122.758872502 podStartE2EDuration="2m2.758872502s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:34.757094714 +0000 UTC m=+143.695073328" watchObservedRunningTime="2025-10-01 05:31:34.758872502 +0000 UTC m=+143.696851126"
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.809164 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.809308 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.30928599 +0000 UTC m=+144.247264604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.810573 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.810876 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.310864674 +0000 UTC m=+144.248843288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.811187 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n"]
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.819977 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv"]
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.912222 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:34 crc kubenswrapper[4661]: E1001 05:31:34.912582 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.412566474 +0000 UTC m=+144.350545088 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:34 crc kubenswrapper[4661]: I1001 05:31:34.931192 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.021333 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.021625 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.521613305 +0000 UTC m=+144.459591919 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.070359 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.124115 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.124670 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.624650382 +0000 UTC m=+144.562628996 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.162292 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" podStartSLOduration=122.162271493 podStartE2EDuration="2m2.162271493s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:35.123454679 +0000 UTC m=+144.061433293" watchObservedRunningTime="2025-10-01 05:31:35.162271493 +0000 UTC m=+144.100250117"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.225736 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.228022 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.726071675 +0000 UTC m=+144.664050289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.327150 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.327433 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.827393195 +0000 UTC m=+144.765371809 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.430128 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.430812 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:35.930797823 +0000 UTC m=+144.868776437 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.532366 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.532481 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.032461962 +0000 UTC m=+144.970440576 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.532686 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.533058 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.033045249 +0000 UTC m=+144.971023863 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.594561 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6226h"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.602843 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qfb5h"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.608283 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qz8x2"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.610371 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.613969 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9"]
Oct 01 05:31:35 crc kubenswrapper[4661]: W1001 05:31:35.625046 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27b2dcf3_b40d_4c3d_a9a9_b68c77b96669.slice/crio-124ea792924a2e0092376dcee1a3965cfe406db43feaaf4257c6e26079824085 WatchSource:0}: Error finding container 124ea792924a2e0092376dcee1a3965cfe406db43feaaf4257c6e26079824085: Status 404 returned error can't find the container with id 124ea792924a2e0092376dcee1a3965cfe406db43feaaf4257c6e26079824085
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.634048 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.634340 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.134327528 +0000 UTC m=+145.072306142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: W1001 05:31:35.641309 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1f59b90_125d_4668_ba39_27c606b07de0.slice/crio-46a10542ea8ff895c4378fd106ae7b15642602ed4ad3766cd0a4cfb6060d99b0 WatchSource:0}: Error finding container 46a10542ea8ff895c4378fd106ae7b15642602ed4ad3766cd0a4cfb6060d99b0: Status 404 returned error can't find the container with id 46a10542ea8ff895c4378fd106ae7b15642602ed4ad3766cd0a4cfb6060d99b0
Oct 01 05:31:35 crc kubenswrapper[4661]: W1001 05:31:35.673678 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a988c7a_aae7_4f17_bdb0_8395fd40008d.slice/crio-3bf1e8d747f28f928088dd7d0e40735503f64a565682f24a92daa62a9955f899 WatchSource:0}: Error finding container 3bf1e8d747f28f928088dd7d0e40735503f64a565682f24a92daa62a9955f899: Status 404 returned error can't find the container with id 3bf1e8d747f28f928088dd7d0e40735503f64a565682f24a92daa62a9955f899
Oct 01 05:31:35 crc kubenswrapper[4661]: W1001 05:31:35.677312 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3b2fb02_1431_4fd3_83e9_37770bb57825.slice/crio-7b036b9b36ef220242ec489d1c843fc3054cabb654913672bc2560f299366b5a WatchSource:0}: Error finding container 7b036b9b36ef220242ec489d1c843fc3054cabb654913672bc2560f299366b5a: Status 404 returned error can't find the container with id 7b036b9b36ef220242ec489d1c843fc3054cabb654913672bc2560f299366b5a
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.735568 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.735974 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.235958217 +0000 UTC m=+145.173936831 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.792370 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nwldz" event={"ID":"94f516db-b964-4e71-9fdf-8276800923ad","Type":"ContainerStarted","Data":"624e533a9acbf011c949389fbb367b1a0d31605b8417551f19c42853ea31c993"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.793228 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-nwldz"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.799950 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" event={"ID":"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f","Type":"ContainerStarted","Data":"9bc72519c44f6de3b468b909d41e1e827ac2c1e46e262df9ecf7277acb9d745d"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.800379 4661 patch_prober.go:28] interesting pod/console-operator-58897d9998-nwldz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.800416 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nwldz" podUID="94f516db-b964-4e71-9fdf-8276800923ad" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.801759 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" event={"ID":"6a988c7a-aae7-4f17-bdb0-8395fd40008d","Type":"ContainerStarted","Data":"3bf1e8d747f28f928088dd7d0e40735503f64a565682f24a92daa62a9955f899"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.814650 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" event={"ID":"6f227808-18ba-4538-aed5-d994ba07c2fb","Type":"ContainerStarted","Data":"ecdd33114bf1f3ca349246b8f309fa49ec3759ba0b9c96e53ee4ff4796bd16d9"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.814855 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.820532 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" event={"ID":"b3b2fb02-1431-4fd3-83e9-37770bb57825","Type":"ContainerStarted","Data":"7b036b9b36ef220242ec489d1c843fc3054cabb654913672bc2560f299366b5a"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.822406 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" event={"ID":"0c21d97e-1221-464d-ae54-56ea6e626e00","Type":"ContainerStarted","Data":"401b7a26ea66960ba841fc5429dc0da914affd68d8a3cf7633d988bf41a0721f"}
event={"ID":"0c21d97e-1221-464d-ae54-56ea6e626e00","Type":"ContainerStarted","Data":"401b7a26ea66960ba841fc5429dc0da914affd68d8a3cf7633d988bf41a0721f"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.822693 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp"] Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.835807 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" event={"ID":"8c150ea9-cd3d-4dab-9701-e7bfef917b0e","Type":"ContainerStarted","Data":"4376a827b7c8035abeb53f996b697dcb950118f20ae568c46aedfea2421298ff"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.837882 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.838703 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.338679666 +0000 UTC m=+145.276658290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.851254 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" event={"ID":"34ae1ed8-796d-4296-b003-aed0d5b82e52","Type":"ContainerStarted","Data":"04579513b9c89e54f3a8f3a5360f6cf6c2164824344932560f3da20ca1b47f78"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.852618 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6226h" event={"ID":"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669","Type":"ContainerStarted","Data":"124ea792924a2e0092376dcee1a3965cfe406db43feaaf4257c6e26079824085"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.864177 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"] Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.874049 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" event={"ID":"b5dd7e68-1886-4112-9923-bc135c6d5302","Type":"ContainerStarted","Data":"db86f28ebb52b3ef172d72e5f1db50b5897889ae41f0d59667c5eebe026a9434"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.878541 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" event={"ID":"f1f59b90-125d-4668-ba39-27c606b07de0","Type":"ContainerStarted","Data":"46a10542ea8ff895c4378fd106ae7b15642602ed4ad3766cd0a4cfb6060d99b0"} Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.888750 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.889486 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.389453124 +0000 UTC m=+145.327431738 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.903886 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.909295 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-md22d" podStartSLOduration=122.909279482 podStartE2EDuration="2m2.909279482s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:35.863904341 +0000 UTC m=+144.801882955" watchObservedRunningTime="2025-10-01 05:31:35.909279482 +0000 UTC m=+144.847258096"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.910805 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7mdcj"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.914374 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" event={"ID":"a1b696bc-9f56-41ca-a537-532c2575e5d0","Type":"ContainerStarted","Data":"30e12c3a8eaab4e5f0c71181ed7713a9d35ecbdd336ad9421b78f4e77e86f974"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.919677 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.921839 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mzsmw"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.924279 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-bnbps" podStartSLOduration=122.924264399 podStartE2EDuration="2m2.924264399s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:35.914667118 +0000 UTC m=+144.852645732" watchObservedRunningTime="2025-10-01 05:31:35.924264399 +0000 UTC m=+144.862243013"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.925175 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf"]
Oct 01 05:31:35 crc kubenswrapper[4661]: W1001 05:31:35.928758 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7c80f42_b230_4eb5_a285_b6e4a60a34ad.slice/crio-02c33fdc36ea9517396a70b534b80b55104c5eca95c496de6c1660602d31c639 WatchSource:0}: Error finding container 02c33fdc36ea9517396a70b534b80b55104c5eca95c496de6c1660602d31c639: Status 404 returned error can't find the container with id 02c33fdc36ea9517396a70b534b80b55104c5eca95c496de6c1660602d31c639
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.929036 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.929822 4661 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mqx4x container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.929860 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.931692 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" event={"ID":"02fc1037-823e-4082-b228-eb5e00b98f3b","Type":"ContainerStarted","Data":"34f800d94e4e5d44b65f26d4d4165133b81bda392789bbc634691c6d842c8971"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.936955 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" event={"ID":"147f1c2a-2e7c-4443-a428-1ac5222baf00","Type":"ContainerStarted","Data":"99bc98ef13e3b289783a1b995e4a568fc8efb297888239400a023bf2a2bcf753"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.943498 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt" event={"ID":"0e2a7348-cb1f-4ad4-8e45-82e8386569a9","Type":"ContainerStarted","Data":"90d625b98067f77347fb22d25a42fad823ca9dca6858806138fd5b00f4f6faee"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.946684 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" podStartSLOduration=122.946671477 podStartE2EDuration="2m2.946671477s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:35.944287653 +0000 UTC m=+144.882266267" watchObservedRunningTime="2025-10-01 05:31:35.946671477 +0000 UTC m=+144.884650091"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.949152 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2nj65" event={"ID":"5d412a34-f608-4b2d-8485-197efa42d0f9","Type":"ContainerStarted","Data":"7a0db57f5f8310a450162daa69e3124bc26e2267657840e16d35a0b30fdc4809"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.949983 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2nj65"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.951487 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body=
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.951527 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.967471 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" event={"ID":"7824f777-df2b-46ba-a9ea-6a428351d121","Type":"ContainerStarted","Data":"1675103ebfb9b91b8a926c74a44f0ef0a219a7ac5ce8d9bdfb7bfeaab8fb8ce6"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.971761 4661 generic.go:334] "Generic (PLEG): container finished" podID="1d7a9657-cc3a-4c17-9312-9c7242ca9e95" containerID="3d68274853102d3d949e9792467c93b41b979b5fb0d059706cd8c0849353047a" exitCode=0
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.972571 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" event={"ID":"1d7a9657-cc3a-4c17-9312-9c7242ca9e95","Type":"ContainerDied","Data":"3d68274853102d3d949e9792467c93b41b979b5fb0d059706cd8c0849353047a"}
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.979305 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"]
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.983615 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" podStartSLOduration=122.98360242 podStartE2EDuration="2m2.98360242s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:35.977782111 +0000 UTC m=+144.915760725" watchObservedRunningTime="2025-10-01 05:31:35.98360242 +0000 UTC m=+144.921581034"
Oct 01 05:31:35 crc kubenswrapper[4661]: I1001 05:31:35.993106 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:35 crc kubenswrapper[4661]: E1001 05:31:35.995422 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.49540775 +0000 UTC m=+145.433386364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.001191 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" event={"ID":"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b","Type":"ContainerStarted","Data":"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.002972 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.004941 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" event={"ID":"b0dc486f-e096-48b2-9708-dd01ff18cfef","Type":"ContainerStarted","Data":"9d1b98ebc72f493cd40cdca094ee0a674bc53b720f5fe4e43c47b38494ee6f42"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.004977 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" event={"ID":"b0dc486f-e096-48b2-9708-dd01ff18cfef","Type":"ContainerStarted","Data":"e88c7293bb616f502ffb10a93fb4e2d23ec88053692f79778bd4abc66c63f996"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.009126 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-shn64" event={"ID":"7da01014-205a-4c43-8640-653fd3b65c0a","Type":"ContainerStarted","Data":"8a4793500c53077c340d1baf20a1a115446eb121bd4965ca8b113ed343363236"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.013606 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nzndp" event={"ID":"ed4a1d37-47d2-4949-8841-4ab7f929e6fa","Type":"ContainerStarted","Data":"d399a3f67feae26beda6bc8b9c922c53981052ec227dee3aeb28376af2c9e674"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.016077 4661 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dbmt2 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.016122 4661 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.021575 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rm9mt" podStartSLOduration=123.02156361 podStartE2EDuration="2m3.02156361s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:36.018372934 +0000 UTC m=+144.956351548" watchObservedRunningTime="2025-10-01 05:31:36.02156361 +0000 UTC m=+144.959542234" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.025130 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" event={"ID":"653d6195-02ef-4ded-a397-9dd414f5a66e","Type":"ContainerStarted","Data":"c2e31ffce1054ea24e7bb43b0a8e6f53a675c1e03683586bb5fa22b9cd8a9798"} Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.061602 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" podStartSLOduration=123.061583776 podStartE2EDuration="2m3.061583776s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:36.059697116 +0000 UTC m=+144.997675730" watchObservedRunningTime="2025-10-01 05:31:36.061583776 +0000 UTC m=+144.999562390" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.095685 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.096910 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.596896505 +0000 UTC m=+145.534875119 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.113463 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-nzndp" podStartSLOduration=5.113434814 podStartE2EDuration="5.113434814s" podCreationTimestamp="2025-10-01 05:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:36.101656205 +0000 UTC m=+145.039634829" watchObservedRunningTime="2025-10-01 05:31:36.113434814 +0000 UTC m=+145.051413448" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.146151 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2nj65" podStartSLOduration=123.146129952 podStartE2EDuration="2m3.146129952s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:36.144912799 +0000 UTC m=+145.082891413" watchObservedRunningTime="2025-10-01 05:31:36.146129952 +0000 UTC m=+145.084108566" Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.197237 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.197723 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.697702382 +0000 UTC m=+145.635680996 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.227210 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-shn64" podStartSLOduration=123.227197262 podStartE2EDuration="2m3.227197262s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:36.226319109 +0000 UTC m=+145.164297723" watchObservedRunningTime="2025-10-01 05:31:36.227197262 +0000 UTC m=+145.165175876"
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.298711 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.301942 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.801922791 +0000 UTC m=+145.739901405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.399888 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.400735 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:36.900695702 +0000 UTC m=+145.838674316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.501535 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.502152 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.002139577 +0000 UTC m=+145.940118191 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.606143 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.606479 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.106463788 +0000 UTC m=+146.044442402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.680207 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-shn64"
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.682082 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.682186 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.707321 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.707758 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.207742168 +0000 UTC m=+146.145720782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.809903 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.810117 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.310088467 +0000 UTC m=+146.248067081 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.810243 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.810525 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.310504478 +0000 UTC m=+146.248483092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.907665 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.908046 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.911652 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.911811 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.411791987 +0000 UTC m=+146.349770611 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.911954 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:36 crc kubenswrapper[4661]: E1001 05:31:36.912257 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.412244309 +0000 UTC m=+146.350222913 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:36 crc kubenswrapper[4661]: I1001 05:31:36.915043 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.013222 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.013410 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.513386365 +0000 UTC m=+146.451364979 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.013693 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.014006 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.513992821 +0000 UTC m=+146.451971435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.032450 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" event={"ID":"61ab52df-4ef3-4f24-a8f3-01c7fef8c99f","Type":"ContainerStarted","Data":"8c509bf472dfcd955eb30b78370e934661b40e06c08327c5f072cb48f255883a"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.034088 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mdcj" event={"ID":"f4b45a54-4eb0-4a00-8ba2-bde133539b24","Type":"ContainerStarted","Data":"9484177b8f5937de526e4df3123f50967b9cae514e9ce6eca5ee394858054c50"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.034118 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mdcj" event={"ID":"f4b45a54-4eb0-4a00-8ba2-bde133539b24","Type":"ContainerStarted","Data":"883e2919a5537c8388ced10a08b71a0bdc1d5e651dbddb2b34232f1530a36a3d"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.034129 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mdcj" event={"ID":"f4b45a54-4eb0-4a00-8ba2-bde133539b24","Type":"ContainerStarted","Data":"9edb4a3be1d85cdf5d6e451132b79a1ce98293d082529bd71a11dce8382f5ee0"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.034241 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7mdcj"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.035778 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" event={"ID":"02fc1037-823e-4082-b228-eb5e00b98f3b","Type":"ContainerStarted","Data":"c68b7c9f69d3a849a845f425f29eb7096b222e60275f5feac05d8c0d0893ee54"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.035810 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" event={"ID":"02fc1037-823e-4082-b228-eb5e00b98f3b","Type":"ContainerStarted","Data":"4125f7735479f60812076571b6a121741040c5b2823726204170ffa75b8c87d1"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.036799 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6226h" event={"ID":"27b2dcf3-b40d-4c3d-a9a9-b68c77b96669","Type":"ContainerStarted","Data":"0cc6f65fd462ee1a3ce437db94c07494e90432223afee0b9410e735e29e5f5e9"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.037990 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" event={"ID":"6f227808-18ba-4538-aed5-d994ba07c2fb","Type":"ContainerStarted","Data":"e8f76618bc5dba45d57f1f355590c6487eb58630399dd70037bc1e153fcf8427"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.039403 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" event={"ID":"34ae1ed8-796d-4296-b003-aed0d5b82e52","Type":"ContainerStarted","Data":"2fd4d1c06e24df6bf1741d57b077e7ff8f5269825e3a84495963c4a0a4955d97"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.040603 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" event={"ID":"a1b696bc-9f56-41ca-a537-532c2575e5d0","Type":"ContainerStarted","Data":"87889544a1a20cbc77208aef06486e63be6fc06cfed6916c01c2b19de4be9366"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.040645 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" event={"ID":"a1b696bc-9f56-41ca-a537-532c2575e5d0","Type":"ContainerStarted","Data":"33c39bf925ddda46e6ee0eb411a44db6d082e8fb516258aa7edb2be427677a9e"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.041736 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" event={"ID":"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90","Type":"ContainerStarted","Data":"35361bf776a8fa519f88223547591f0580f105501fabab78fb1efdb7bd4ded70"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.041775 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" event={"ID":"a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90","Type":"ContainerStarted","Data":"2b1f9c28b440c6b5a8e177b8f9c705831b7a36ec17a7003470fdb2d5666161af"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.041916 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.043111 4661 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-knw9f container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused" start-of-body=
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.043155 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" podUID="a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.043570 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-plvzs" event={"ID":"147f1c2a-2e7c-4443-a428-1ac5222baf00","Type":"ContainerStarted","Data":"721175c80ade966dafa2e1ba31a8897a852e0ca5cffb9f4a90213db76a7f5f96"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.045073 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" event={"ID":"1d7a9657-cc3a-4c17-9312-9c7242ca9e95","Type":"ContainerStarted","Data":"029c9b2908f6901ce9c26d91a25e7e2e86355104c1177dc2acd2854bb852dbd2"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.045185 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.045814 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" event={"ID":"d5278166-ab19-4464-87d6-3fc6fe335855","Type":"ContainerStarted","Data":"90d9e9210c7040215ef7bd6600891905a102332b318f025ed074f047edeb73fe"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.047444 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" event={"ID":"0c21d97e-1221-464d-ae54-56ea6e626e00","Type":"ContainerStarted","Data":"cd70db37cfe866174e1a46931634be3d7c01451ec85c2cbcabb70e67a6b587ba"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.054877 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" event={"ID":"560c851e-0596-4c5f-9191-a7201149e335","Type":"ContainerStarted","Data":"43170cc905accdbf9af49b247490b9bb2050d232743366464df3160189030880"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.054923 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" event={"ID":"560c851e-0596-4c5f-9191-a7201149e335","Type":"ContainerStarted","Data":"87b333954064d51c29aa561762a8e1e3d7fe1957be407a92263a36ce18724932"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.057182 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bnbps" event={"ID":"591acb44-aa58-4103-98f7-b68e067bc90d","Type":"ContainerStarted","Data":"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.058453 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" event={"ID":"a731ee04-6aba-49d9-b8b0-392d31d55da2","Type":"ContainerStarted","Data":"33db51cd837328066456c67b6fbda93fe880a51f254777aec2aaa59817582f10"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.058491 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" event={"ID":"a731ee04-6aba-49d9-b8b0-392d31d55da2","Type":"ContainerStarted","Data":"6c2590f2e33bfdf719558afa7696c68fbbe9aca2f905913db707bed0c64a8fe2"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.063091 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" event={"ID":"b5dd7e68-1886-4112-9923-bc135c6d5302","Type":"ContainerStarted","Data":"f733fc9bddcd7e5cbbca3d97d01b156191b3cdfa7e8d4ebb123fdb4cad8e8a5d"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.064686 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kqlm6" podStartSLOduration=124.064670327 podStartE2EDuration="2m4.064670327s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.064268486 +0000 UTC m=+146.002247100" watchObservedRunningTime="2025-10-01 05:31:37.064670327 +0000 UTC m=+146.002648941"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.068378 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" event={"ID":"5731cb34-0698-48be-a662-79dd89e808b2","Type":"ContainerStarted","Data":"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.068868 4661 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mqx4x container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.068902 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.070919 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" event={"ID":"7824f777-df2b-46ba-a9ea-6a428351d121","Type":"ContainerStarted","Data":"f693091bc86b473871be8b1e22fd2f0bb016508e1ca320d786908950a44f279c"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.072624 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" event={"ID":"6a988c7a-aae7-4f17-bdb0-8395fd40008d","Type":"ContainerStarted","Data":"5369b12d6958cddcbc52d5737d4d91b319d614867ce879143b21d1444d66fb1a"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.073779 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" event={"ID":"276ccab1-820f-4a4f-81b2-fdca6aa59628","Type":"ContainerStarted","Data":"626eaed0777a2fc1e56f4c83c90b02760a6afe2fa5f43bfd6a8e3f81973c75ec"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.073804 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" event={"ID":"276ccab1-820f-4a4f-81b2-fdca6aa59628","Type":"ContainerStarted","Data":"8e44b7d6670819e926bf430254f7b5b79919d7c16ed0c901c499592ad92c955f"}
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.075542 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr"
event={"ID":"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04","Type":"ContainerStarted","Data":"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.075567 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" event={"ID":"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04","Type":"ContainerStarted","Data":"3afcf924687c003a44d02e9ff6200755026688019a06b7919c1d613ce9e6986a"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.075918 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.077551 4661 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r5ghr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.077599 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.080151 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" event={"ID":"fc570c7d-b32c-4d0b-ba06-73b13143cd0c","Type":"ContainerStarted","Data":"35ae60c2121d1507b2ff56ba8f2f51fcf51c669f41a758db7c02cbbb72cd9232"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.083857 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nzndp" event={"ID":"ed4a1d37-47d2-4949-8841-4ab7f929e6fa","Type":"ContainerStarted","Data":"73533b1c8bd9ddaac817318d110f81cec4d4ee9ca350566bc75b64b537bf6daa"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.085882 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" event={"ID":"53107074-138a-4df9-ab42-9e09b5a257d3","Type":"ContainerStarted","Data":"af9a8c729d20c953357ef224808a3319b5552cb66f4b62f620d8d33fa7c6c161"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.085924 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" event={"ID":"53107074-138a-4df9-ab42-9e09b5a257d3","Type":"ContainerStarted","Data":"7c2c3234a072adffecf389b9748d30c372d17e80f41f72fc8693d10b79a86454"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.085935 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" event={"ID":"53107074-138a-4df9-ab42-9e09b5a257d3","Type":"ContainerStarted","Data":"09b3f66815dda94bdfe50551f5a887b9e0b92177aabc30e540d1efdbbbe96a29"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.091732 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" event={"ID":"3b8c7190-1906-4f8f-b93a-85ad0b277e8f","Type":"ContainerStarted","Data":"89236ed18924fbcbd22ecfd77ee9f35fec7d540a1f2f82dd2e47ba7aa372f2c8"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 
05:31:37.093411 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" event={"ID":"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7","Type":"ContainerStarted","Data":"71794a8549dea8ba3bfba8026baa29b280daa34c62bbf54339f7a4626ba47f21"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.093949 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.094865 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-nkksl" podStartSLOduration=124.094854256 podStartE2EDuration="2m4.094854256s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.091699381 +0000 UTC m=+146.029677995" watchObservedRunningTime="2025-10-01 05:31:37.094854256 +0000 UTC m=+146.032832870" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.095672 4661 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-j9mhf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.095709 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.099519 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" event={"ID":"9a25b149-9f1f-4723-9c39-e6070685311c","Type":"ContainerStarted","Data":"080cdb62103a101469448a0f2e98baa80a58f695a389c67a0f6cecb156ba97d3"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.099557 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" event={"ID":"9a25b149-9f1f-4723-9c39-e6070685311c","Type":"ContainerStarted","Data":"211c98978a69ee619f372365e23821876a30264d88b6bada71b111692b0c25ae"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.103869 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" event={"ID":"b0dc486f-e096-48b2-9708-dd01ff18cfef","Type":"ContainerStarted","Data":"3a214b90a3bde395c0575d6b82aac371a736783d18e0c089a04f3aaa27be7ef4"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.103941 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.105495 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" event={"ID":"f1f59b90-125d-4668-ba39-27c606b07de0","Type":"ContainerStarted","Data":"9c033eb12cc47aaead3ad370de8696c78bddf33ff0a4963614e906061f5d360b"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.106651 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" event={"ID":"ee2a5e15-cecf-470a-9f59-acef3b3e87a6","Type":"ContainerStarted","Data":"a4578fbf0c059efdeb8407300f7f633bbf0487e6e29e020d01fdc0929ca2d095"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.106676 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" event={"ID":"ee2a5e15-cecf-470a-9f59-acef3b3e87a6","Type":"ContainerStarted","Data":"7a6c98b1491df79a9af2161c306b69a8657ceb80643a8eabd72e4810b865f16e"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.107378 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.108362 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" event={"ID":"b3b2fb02-1431-4fd3-83e9-37770bb57825","Type":"ContainerStarted","Data":"f720b313f099c2c8f59204d5282183a764ed907e370e8b1c413cd1909b2b7c00"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.108655 4661 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-zh9kb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.108685 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" podUID="ee2a5e15-cecf-470a-9f59-acef3b3e87a6" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.113809 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" event={"ID":"653d6195-02ef-4ded-a397-9dd414f5a66e","Type":"ContainerStarted","Data":"4d77b76d00b1b1a4888c5dd3fa4acfd14d68d45318082324db63c03b88b9c49b"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.114562 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.115341 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.116936 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.616919706 +0000 UTC m=+146.554898320 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.117170 4661 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7ldmq container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.117222 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" podUID="653d6195-02ef-4ded-a397-9dd414f5a66e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.126726 4661 patch_prober.go:28] interesting pod/console-operator-58897d9998-nwldz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.126822 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nwldz" podUID="94f516db-b964-4e71-9fdf-8276800923ad" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.128892 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" event={"ID":"d7c80f42-b230-4eb5-a285-b6e4a60a34ad","Type":"ContainerStarted","Data":"2d2e9d27f3be6c1f412ab9ac14e69085b11d400e6eec98d4e89d82ff08a2fd60"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.129200 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.129248 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.129423 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" event={"ID":"d7c80f42-b230-4eb5-a285-b6e4a60a34ad","Type":"ContainerStarted","Data":"02c33fdc36ea9517396a70b534b80b55104c5eca95c496de6c1660602d31c639"} Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.157778 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-kh62k" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.166772 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zcpdn" podStartSLOduration=124.166756879 podStartE2EDuration="2m4.166756879s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.164004494 +0000 UTC m=+146.101983118" watchObservedRunningTime="2025-10-01 05:31:37.166756879 +0000 UTC m=+146.104735493" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.213353 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" podStartSLOduration=124.213337904 podStartE2EDuration="2m4.213337904s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.21212422 +0000 UTC m=+146.150102844" watchObservedRunningTime="2025-10-01 05:31:37.213337904 +0000 UTC m=+146.151316508" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.217937 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.220135 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.220518 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.720489497 +0000 UTC m=+146.658468111 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.222314 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.229347 4661 patch_prober.go:28] interesting pod/apiserver-76f77b778f-gtlzc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.20:8443/livez\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body=
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.229399 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" podUID="7824f777-df2b-46ba-a9ea-6a428351d121" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.20:8443/livez\": dial tcp 10.217.0.20:8443: connect: connection refused"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.234751 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-x4m4n" podStartSLOduration=124.234734384 podStartE2EDuration="2m4.234734384s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.233161182 +0000 UTC m=+146.171139796" watchObservedRunningTime="2025-10-01 05:31:37.234734384 +0000 UTC m=+146.172712988"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.258809 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" podStartSLOduration=97.258780677 podStartE2EDuration="1m37.258780677s" podCreationTimestamp="2025-10-01 05:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.258080537 +0000 UTC m=+146.196059151" watchObservedRunningTime="2025-10-01 05:31:37.258780677 +0000 UTC m=+146.196759281"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.280238 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7mdcj" podStartSLOduration=6.280223858 podStartE2EDuration="6.280223858s" podCreationTimestamp="2025-10-01 05:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.274589035 +0000 UTC m=+146.212567649" watchObservedRunningTime="2025-10-01 05:31:37.280223858 +0000 UTC m=+146.218202472"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.303328 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x28q5" podStartSLOduration=124.303315256 podStartE2EDuration="2m4.303315256s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.301693642 +0000 UTC m=+146.239672256" watchObservedRunningTime="2025-10-01 05:31:37.303315256 +0000 UTC m=+146.241293870"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.319589 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.319755 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.819730331 +0000 UTC m=+146.757708945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.319921 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.320287 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.820273286 +0000 UTC m=+146.758251890 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.327678 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ctkvv" podStartSLOduration=125.327661726 podStartE2EDuration="2m5.327661726s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.321024607 +0000 UTC m=+146.259003221" watchObservedRunningTime="2025-10-01 05:31:37.327661726 +0000 UTC m=+146.265640340"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.346898 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4hjg" podStartSLOduration=125.346883948 podStartE2EDuration="2m5.346883948s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.344778521 +0000 UTC m=+146.282757135" watchObservedRunningTime="2025-10-01 05:31:37.346883948 +0000 UTC m=+146.284862562"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.378988 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-sppsj" podStartSLOduration=124.37897367 podStartE2EDuration="2m4.37897367s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.376876183 +0000 UTC m=+146.314854797" watchObservedRunningTime="2025-10-01 05:31:37.37897367 +0000 UTC m=+146.316952284"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.407413 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n" podStartSLOduration=124.407394731 podStartE2EDuration="2m4.407394731s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.405658824 +0000 UTC m=+146.343637438" watchObservedRunningTime="2025-10-01 05:31:37.407394731 +0000 UTC m=+146.345373345"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.420727 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.420856 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.920837347 +0000 UTC m=+146.858815961 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.421076 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.421371 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:37.921362281 +0000 UTC m=+146.859340895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.425534 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.448943 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-6226h" podStartSLOduration=7.448927629 podStartE2EDuration="7.448927629s" podCreationTimestamp="2025-10-01 05:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.422320316 +0000 UTC m=+146.360298930" watchObservedRunningTime="2025-10-01 05:31:37.448927629 +0000 UTC m=+146.386906243"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.450684 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" podStartSLOduration=124.450677676 podStartE2EDuration="2m4.450677676s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.448295731 +0000 UTC m=+146.386274345" watchObservedRunningTime="2025-10-01 05:31:37.450677676 +0000 UTC m=+146.388656290"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.470565 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-qfb5h" podStartSLOduration=124.470532905 podStartE2EDuration="2m4.470532905s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.513170 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" podStartSLOduration=125.513151793 podStartE2EDuration="2m5.513151793s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.51122792 +0000 UTC m=+146.449206534" watchObservedRunningTime="2025-10-01 05:31:37.513151793 +0000 UTC m=+146.451130407"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.522188 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.522354 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.022320711 +0000 UTC m=+146.960299325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.522673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.523020 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.02301126 +0000 UTC m=+146.960989864 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.534731 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-222p4" podStartSLOduration=125.534718127 podStartE2EDuration="2m5.534718127s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.533246148 +0000 UTC m=+146.471224762" watchObservedRunningTime="2025-10-01 05:31:37.534718127 +0000 UTC m=+146.472696731"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.590596 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vtztn" podStartSLOduration=124.590578654 podStartE2EDuration="2m4.590578654s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.557344161 +0000 UTC m=+146.495322765" watchObservedRunningTime="2025-10-01 05:31:37.590578654 +0000 UTC m=+146.528557268"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.624379 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.624783 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.124753751 +0000 UTC m=+147.062732375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.625045 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.625392 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.125364979 +0000 UTC m=+147.063343583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.636229 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c5cbp" podStartSLOduration=124.636212972 podStartE2EDuration="2m4.636212972s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.590318587 +0000 UTC m=+146.528297201" watchObservedRunningTime="2025-10-01 05:31:37.636212972 +0000 UTC m=+146.574191586"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.661760 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" podStartSLOduration=124.661742016 podStartE2EDuration="2m4.661742016s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.63978035 +0000 UTC m=+146.577758964" watchObservedRunningTime="2025-10-01 05:31:37.661742016 +0000 UTC m=+146.599720630"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.662961 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" podStartSLOduration=124.662955799 podStartE2EDuration="2m4.662955799s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.661427237 +0000 UTC m=+146.599405851" watchObservedRunningTime="2025-10-01 05:31:37.662955799 +0000 UTC m=+146.600934413"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.688407 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ngdlf" podStartSLOduration=124.688392429 podStartE2EDuration="2m4.688392429s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.686276712 +0000 UTC m=+146.624255326" watchObservedRunningTime="2025-10-01 05:31:37.688392429 +0000 UTC m=+146.626371043"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.704002 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 05:31:37 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld
Oct 01 05:31:37 crc kubenswrapper[4661]: [+]process-running ok
Oct 01 05:31:37 crc kubenswrapper[4661]: healthz check failed
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.704057 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.725590 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.725767 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.225740293 +0000 UTC m=+147.163718907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.726003 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.726414 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.226407042 +0000 UTC m=+147.164385656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.759534 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p6wgf" podStartSLOduration=124.7595174 podStartE2EDuration="2m4.7595174s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.756825858 +0000 UTC m=+146.694804472" watchObservedRunningTime="2025-10-01 05:31:37.7595174 +0000 UTC m=+146.697496014"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.783205 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-2bwjb" podStartSLOduration=124.783182923 podStartE2EDuration="2m4.783182923s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.782392641 +0000 UTC m=+146.720371255" watchObservedRunningTime="2025-10-01 05:31:37.783182923 +0000 UTC m=+146.721161617"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.798100 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-qz8x2" podStartSLOduration=124.798079287 podStartE2EDuration="2m4.798079287s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.794382117 +0000 UTC m=+146.732360731" watchObservedRunningTime="2025-10-01 05:31:37.798079287 +0000 UTC m=+146.736057901"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.826906 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.827120 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vpzn9" podStartSLOduration=124.827105905 podStartE2EDuration="2m4.827105905s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.82583627 +0000 UTC m=+146.763814884" watchObservedRunningTime="2025-10-01 05:31:37.827105905 +0000 UTC m=+146.765084519"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.827780 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.327755603 +0000 UTC m=+147.265734217 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.857457 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" podStartSLOduration=124.857438348 podStartE2EDuration="2m4.857438348s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.855937318 +0000 UTC m=+146.793915932" watchObservedRunningTime="2025-10-01 05:31:37.857438348 +0000 UTC m=+146.795416962"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.906416 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" podStartSLOduration=125.906399748 podStartE2EDuration="2m5.906399748s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:37.883047464 +0000 UTC m=+146.821026078" watchObservedRunningTime="2025-10-01 05:31:37.906399748 +0000 UTC m=+146.844378362"
Oct 01 05:31:37 crc kubenswrapper[4661]: I1001 05:31:37.929547 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:37 crc kubenswrapper[4661]: E1001 05:31:37.929943 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.429927436 +0000 UTC m=+147.367906040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.031131 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.031349 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.531306219 +0000 UTC m=+147.469284833 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.031494 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.031796 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.531783952 +0000 UTC m=+147.469762566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126865 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126895 4661 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mqx4x container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126911 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126956 4661 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-zh9kb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126953 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.126993 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb" podUID="ee2a5e15-cecf-470a-9f59-acef3b3e87a6" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.127488 4661 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r5ghr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.127507 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.128020 4661 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-knw9f container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.128119 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" podUID="a58abfcd-35e2-40e7-ab44-6ec6fd7ddd90" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.128218 4661 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7ldmq container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body=
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.128261 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq" podUID="653d6195-02ef-4ded-a397-9dd414f5a66e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.132137 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.132216 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.632198278 +0000 UTC m=+147.570176892 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.132697 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.135322 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.635302472 +0000 UTC m=+147.573281086 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.234532 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.234670 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.734653588 +0000 UTC m=+147.672632202 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.234939 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.235375 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.735359757 +0000 UTC m=+147.673338371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.335886 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.336201 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.836177044 +0000 UTC m=+147.774155658 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.336286 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.336555 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.836542934 +0000 UTC m=+147.774521548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.437883 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.438186 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:38.938171194 +0000 UTC m=+147.876149808 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.538862 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.539138 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.039127074 +0000 UTC m=+147.977105688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.639705 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.639958 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.139943901 +0000 UTC m=+148.077922515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.682449 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 05:31:38 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld
Oct 01 05:31:38 crc kubenswrapper[4661]: [+]process-running ok
Oct 01 05:31:38 crc kubenswrapper[4661]: healthz check failed
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.682502 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.740772 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.741131 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.241117438 +0000 UTC m=+148.179096052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.841954 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.842185 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.342170611 +0000 UTC m=+148.280149225 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:38 crc kubenswrapper[4661]: I1001 05:31:38.943606 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:38 crc kubenswrapper[4661]: E1001 05:31:38.943888 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.443876331 +0000 UTC m=+148.381854935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.044977 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.045155 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.54513305 +0000 UTC m=+148.483111664 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.045260 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.045360 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.045658 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.545643945 +0000 UTC m=+148.483622559 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.046097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.127343 4661 patch_prober.go:28] interesting pod/console-operator-58897d9998-nwldz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.127402 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nwldz" podUID="94f516db-b964-4e71-9fdf-8276800923ad" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.127422 4661 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-j9mhf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.7:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.127479 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.7:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.146026 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" event={"ID":"d5278166-ab19-4464-87d6-3fc6fe335855","Type":"ContainerStarted","Data":"160081373f5f34daa68cdd2394cd9953abac10eda7817621dd1d19bcb266a475"}
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.146078 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" event={"ID":"d5278166-ab19-4464-87d6-3fc6fe335855","Type":"ContainerStarted","Data":"3708aff406ce9ce7ff3e4dc536f263e9e177b4c4df7cb318317dc06e41171c94"}
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.146726 4661 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r5ghr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body=
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.146787 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.146740 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.146874 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.646854482 +0000 UTC m=+148.584833086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.147159 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.147205 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.147243 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.147367 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.147744 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.647733686 +0000 UTC m=+148.585712300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.162893 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7ldmq"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.175868 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.176515 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.176999 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.177151 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zh9kb"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.187772 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.195940 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.249741 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.250835 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.750820345 +0000 UTC m=+148.688798959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.287489 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.351901 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.352193 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.852180486 +0000 UTC m=+148.790159100 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.380580 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tnf7n"
Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.453515 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.454138 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:39.954123393 +0000 UTC m=+148.892102007 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.554673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.556328 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.056310798 +0000 UTC m=+148.994289412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.656519 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.656910 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.156896358 +0000 UTC m=+149.094874972 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.684707 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:39 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:39 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:39 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.684754 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.757877 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.758247 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.258232409 +0000 UTC m=+149.196211023 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.859050 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.859287 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.359270862 +0000 UTC m=+149.297249476 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.859368 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.859666 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.359658602 +0000 UTC m=+149.297637216 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:39 crc kubenswrapper[4661]: I1001 05:31:39.963986 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:39 crc kubenswrapper[4661]: E1001 05:31:39.964194 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.464179719 +0000 UTC m=+149.402158333 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.065372 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.065848 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.5658369 +0000 UTC m=+149.503815514 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.078760 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:31:40 crc kubenswrapper[4661]: W1001 05:31:40.101813 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-57e0142d2cdee13bb5ec163385c126319979ab49f6b01a0d88ceebb14e4f2201 WatchSource:0}: Error finding container 57e0142d2cdee13bb5ec163385c126319979ab49f6b01a0d88ceebb14e4f2201: Status 404 returned error can't find the container with id 57e0142d2cdee13bb5ec163385c126319979ab49f6b01a0d88ceebb14e4f2201 Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.152093 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"93d8c32dabfa6062a093e1e336dd063ebb147df80372653f028a1833840aca72"} Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.156987 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" event={"ID":"d5278166-ab19-4464-87d6-3fc6fe335855","Type":"ContainerStarted","Data":"fab27d945aff1570a75297b7958b25ce855c8497b4a53edd653cd1d41336bf0c"} Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.157838 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"57e0142d2cdee13bb5ec163385c126319979ab49f6b01a0d88ceebb14e4f2201"} Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.159668 4661 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"324a87c3829e5a6a05b44ba8ca0ed28d3775800537cfb524c051580190be8b5c"} Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.170136 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.171163 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.671146288 +0000 UTC m=+149.609124902 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.271344 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.271646 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.771620066 +0000 UTC m=+149.709598670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.323737 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.325035 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: W1001 05:31:40.327073 4661 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.327107 4661 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.372860 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.373036 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.873012978 +0000 UTC m=+149.810991592 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.373148 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.373312 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.373352 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.373375 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfpc4\" (UniqueName: \"kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.373699 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.873691727 +0000 UTC m=+149.811670341 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.410258 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.430063 4661 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474193 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.474379 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.97435393 +0000 UTC m=+149.912332544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474424 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474594 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474684 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474718 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfpc4\" (UniqueName: 
\"kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.474843 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.475030 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:40.975019268 +0000 UTC m=+149.912997952 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.475078 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.498455 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfpc4\" (UniqueName: \"kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4\") pod \"community-operators-nwqhx\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.515728 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.516588 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.518289 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.526361 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.575316 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.575485 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.575526 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.575550 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc7sv\" (UniqueName: \"kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.575703 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:41.075689371 +0000 UTC m=+150.013667985 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.676775 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.676817 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.676835 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.676853 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc7sv\" (UniqueName: \"kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.677178 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:41.177162715 +0000 UTC m=+150.115141329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.677255 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.677363 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.687663 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:40 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:40 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:40 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.687718 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.706266 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc7sv\" (UniqueName: \"kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv\") pod \"certified-operators-dtjdw\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.729018 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.729900 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.743712 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.777550 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.777723 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:41.277700935 +0000 UTC m=+150.215679549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.777844 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.777927 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbv4g\" (UniqueName: \"kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.777956 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.777994 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.778280 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 05:31:41.27826995 +0000 UTC m=+150.216248664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2vp7m" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.853940 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.879147 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.879404 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.879448 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.879528 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbv4g\" (UniqueName: \"kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: E1001 05:31:40.879688 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 05:31:41.379669522 +0000 UTC m=+150.317648136 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.880164 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.880255 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.895796 4661 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-01T05:31:40.430088128Z","Handler":null,"Name":""} Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.898854 4661 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.899054 4661 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.899120 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbv4g\" (UniqueName: \"kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g\") pod \"community-operators-x4zzv\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.916570 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-87grd"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.917426 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.927462 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-87grd"] Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.980876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.980937 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.980967 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4pvk\" (UniqueName: \"kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:31:40 crc kubenswrapper[4661]: I1001 05:31:40.981091 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.001706 4661 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.001741 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.081908 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.081990 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.082008 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4pvk\" (UniqueName: \"kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.082664 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.082752 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.094943 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2vp7m\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.106654 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4pvk\" (UniqueName: \"kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk\") pod \"certified-operators-87grd\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.141530 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.158943 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"]
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.176293 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4b4615cb4852d914bae52201c9abaaf51c466e60a3eb9f8de7132d78014d36a2"}
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.176506 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.184138 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.186354 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" event={"ID":"d5278166-ab19-4464-87d6-3fc6fe335855","Type":"ContainerStarted","Data":"bb14bb620c2f7348bd7c64f40db54cfd582810e1b82aa0f02ce573657c23f3bf"}
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.191076 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b651432e389acfab4c2690e7f8093994c244f7c13ddaa1fa7699b4d7daf0d609"}
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.199573 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0ab61ccb6250d0338ab074fb63bbb5ee23871666022cdb98197152ebecaf2192"}
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.208376 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.221837 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-mzsmw" podStartSLOduration=11.22182335 podStartE2EDuration="11.22182335s" podCreationTimestamp="2025-10-01 05:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:41.22105566 +0000 UTC m=+150.159034264" watchObservedRunningTime="2025-10-01 05:31:41.22182335 +0000 UTC m=+150.159801954"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.272013 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.444435 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"]
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.459555 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.460315 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.462490 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.462952 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.469729 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.494280 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.494316 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.566606 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-87grd"]
Oct 01 05:31:41 crc kubenswrapper[4661]: W1001 05:31:41.576972 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9e824d9_0601_424c_8e2a_e9eb8a958086.slice/crio-65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323 WatchSource:0}: Error finding container 65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323: Status 404 returned error can't find the container with id 65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.595055 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.595102 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.595215 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.612944 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.639813 4661 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-marketplace/community-operators-nwqhx" secret="" err="failed to sync secret cache: timed out waiting for the condition"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.639892 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nwqhx"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.690715 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 05:31:41 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld
Oct 01 05:31:41 crc kubenswrapper[4661]: [+]process-running ok
Oct 01 05:31:41 crc kubenswrapper[4661]: healthz check failed
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.690775 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.723136 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.723616 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4zzv"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.773226 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.809466 4661 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.884609 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:31:41 crc kubenswrapper[4661]: W1001 05:31:41.907371 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f018e51_8c87_444b_9d63_3dadae9cd246.slice/crio-54068c464d613706400da216d14ae6daf3997a2e3a76ee4efb52af6a6cbfcdc0 WatchSource:0}: Error finding container 54068c464d613706400da216d14ae6daf3997a2e3a76ee4efb52af6a6cbfcdc0: Status 404 returned error can't find the container with id 54068c464d613706400da216d14ae6daf3997a2e3a76ee4efb52af6a6cbfcdc0 Oct 01 05:31:41 crc kubenswrapper[4661]: W1001 05:31:41.937703 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f940144_59ee_4af8_85a3_34f55b975463.slice/crio-138ae26676667f44d9b8be635b9986c155e5b43d884c76086ed0bffa99f2d1a4 WatchSource:0}: Error finding container 138ae26676667f44d9b8be635b9986c155e5b43d884c76086ed0bffa99f2d1a4: Status 404 returned error can't find the container with id 138ae26676667f44d9b8be635b9986c155e5b43d884c76086ed0bffa99f2d1a4 Oct 01 05:31:41 crc kubenswrapper[4661]: I1001 05:31:41.938121 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.020557 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 05:31:42 crc kubenswrapper[4661]: W1001 05:31:42.029703 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podb751b4a7_28bf_4a5b_a624_7a0d69ef9f46.slice/crio-6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e WatchSource:0}: Error finding container 6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e: Status 404 returned error can't find the container with id 6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.204824 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46","Type":"ContainerStarted","Data":"6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.209028 4661 generic.go:334] "Generic (PLEG): container finished" podID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerID="99d542c07a52f7661236a53fbf80e1e7294488d230c5a06788aa0fbb2123fc98" exitCode=0 Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.209976 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerDied","Data":"99d542c07a52f7661236a53fbf80e1e7294488d230c5a06788aa0fbb2123fc98"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.210006 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerStarted","Data":"65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.210838 4661 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.215329 4661 generic.go:334] "Generic (PLEG): container finished" podID="2f940144-59ee-4af8-85a3-34f55b975463" containerID="a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73" exitCode=0 Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.215411 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerDied","Data":"a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.215447 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerStarted","Data":"138ae26676667f44d9b8be635b9986c155e5b43d884c76086ed0bffa99f2d1a4"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.218713 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" event={"ID":"f5f5e95a-75e8-4950-b60e-6cbbeed59b67","Type":"ContainerStarted","Data":"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.218744 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" event={"ID":"f5f5e95a-75e8-4950-b60e-6cbbeed59b67","Type":"ContainerStarted","Data":"2f1cbcecbce1e52d0cfa523badb0624655de8c8d49bc62493d16819135311761"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.219275 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.222353 4661 generic.go:334] "Generic (PLEG): container finished" podID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerID="ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6" exitCode=0 Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.222438 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerDied","Data":"ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.222479 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerStarted","Data":"c2f9c5a57fcd6bf4162540c0a6c1df4ff4af757895a1f8560959b36b01860a04"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.227225 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.230248 4661 generic.go:334] "Generic (PLEG): container finished" podID="a731ee04-6aba-49d9-b8b0-392d31d55da2" containerID="33db51cd837328066456c67b6fbda93fe880a51f254777aec2aaa59817582f10" exitCode=0 Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.230326 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" event={"ID":"a731ee04-6aba-49d9-b8b0-392d31d55da2","Type":"ContainerDied","Data":"33db51cd837328066456c67b6fbda93fe880a51f254777aec2aaa59817582f10"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 
05:31:42.234259 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-gtlzc" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.243231 4661 generic.go:334] "Generic (PLEG): container finished" podID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerID="f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225" exitCode=0 Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.243364 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerDied","Data":"f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.243415 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerStarted","Data":"54068c464d613706400da216d14ae6daf3997a2e3a76ee4efb52af6a6cbfcdc0"} Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.307343 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" podStartSLOduration=129.307327969 podStartE2EDuration="2m9.307327969s" podCreationTimestamp="2025-10-01 05:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:42.305142469 +0000 UTC m=+151.243121083" watchObservedRunningTime="2025-10-01 05:31:42.307327969 +0000 UTC m=+151.245306583" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.518546 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.519848 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.524535 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.528027 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.613231 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4htr\" (UniqueName: \"kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.613316 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.613438 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.686219 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:42 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:42 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:42 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.686272 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.713853 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.713921 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4htr\" (UniqueName: \"kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.713961 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.714407 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.714419 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.736855 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4htr\" (UniqueName: \"kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr\") pod \"redhat-marketplace-gn77p\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.842103 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.918952 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"] Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.919893 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:42 crc kubenswrapper[4661]: I1001 05:31:42.943025 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.062986 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.119246 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.119435 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldsxt\" (UniqueName: \"kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.119488 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.221456 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldsxt\" (UniqueName: \"kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.221520 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.221657 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.222023 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.222089 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content\") pod \"redhat-marketplace-fkg8j\" (UID: 
\"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.239561 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldsxt\" (UniqueName: \"kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt\") pod \"redhat-marketplace-fkg8j\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") " pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.260506 4661 generic.go:334] "Generic (PLEG): container finished" podID="b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" containerID="5a684b7a0e700a664b6600a4ea8915cff87ef0ef4debaa35fb3e3572031567dd" exitCode=0 Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.260575 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46","Type":"ContainerDied","Data":"5a684b7a0e700a664b6600a4ea8915cff87ef0ef4debaa35fb3e3572031567dd"} Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.273016 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerStarted","Data":"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750"} Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.273114 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerStarted","Data":"9830885f6d91bfa1c904fd510c21de4452ab29888be0965e48fc87d9587272c4"} Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.452771 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.518918 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.521100 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.522899 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.531591 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.532235 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.535610 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.617586 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.617619 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.619617 4661 patch_prober.go:28] interesting pod/console-f9d7485db-bnbps container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.619693 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-bnbps" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.628120 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-nwldz" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.630370 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdlfj\" (UniqueName: \"kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.630431 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.631553 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.680945 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.683728 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.683774 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.684327 4661 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.684350 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.686042 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:43 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:43 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:43 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.686068 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.735503 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume\") pod \"a731ee04-6aba-49d9-b8b0-392d31d55da2\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736061 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k6j9\" (UniqueName: \"kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9\") pod \"a731ee04-6aba-49d9-b8b0-392d31d55da2\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736550 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume\") pod \"a731ee04-6aba-49d9-b8b0-392d31d55da2\" (UID: \"a731ee04-6aba-49d9-b8b0-392d31d55da2\") " Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736624 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume" (OuterVolumeSpecName: "config-volume") pod "a731ee04-6aba-49d9-b8b0-392d31d55da2" (UID: "a731ee04-6aba-49d9-b8b0-392d31d55da2"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736810 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdlfj\" (UniqueName: \"kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736926 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.736992 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.737769 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.739445 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a731ee04-6aba-49d9-b8b0-392d31d55da2-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.739725 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.741453 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a731ee04-6aba-49d9-b8b0-392d31d55da2" (UID: "a731ee04-6aba-49d9-b8b0-392d31d55da2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.742021 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9" (OuterVolumeSpecName: "kube-api-access-2k6j9") pod "a731ee04-6aba-49d9-b8b0-392d31d55da2" (UID: "a731ee04-6aba-49d9-b8b0-392d31d55da2"). InnerVolumeSpecName "kube-api-access-2k6j9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.755002 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdlfj\" (UniqueName: \"kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj\") pod \"redhat-operators-s4skm\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.840914 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k6j9\" (UniqueName: \"kubernetes.io/projected/a731ee04-6aba-49d9-b8b0-392d31d55da2-kube-api-access-2k6j9\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.840946 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a731ee04-6aba-49d9-b8b0-392d31d55da2-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.842172 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.851914 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:31:43 crc kubenswrapper[4661]: W1001 05:31:43.853348 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7984959d_6d47_4013_ad65_5f28f3c52a12.slice/crio-891f14a08eea091a2c7f6d2ad3a5b00b1c0d5cd36bfc3daf8bb787f6b125f5a5 WatchSource:0}: Error finding container 891f14a08eea091a2c7f6d2ad3a5b00b1c0d5cd36bfc3daf8bb787f6b125f5a5: Status 404 returned error can't find the container with id 891f14a08eea091a2c7f6d2ad3a5b00b1c0d5cd36bfc3daf8bb787f6b125f5a5 Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.917025 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:31:43 crc kubenswrapper[4661]: E1001 05:31:43.917311 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a731ee04-6aba-49d9-b8b0-392d31d55da2" containerName="collect-profiles" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.917328 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a731ee04-6aba-49d9-b8b0-392d31d55da2" containerName="collect-profiles" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.917461 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a731ee04-6aba-49d9-b8b0-392d31d55da2" containerName="collect-profiles" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.924433 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.925474 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.942785 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.942890 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:43 crc kubenswrapper[4661]: I1001 05:31:43.942976 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7m2f\" (UniqueName: \"kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.044683 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.045051 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.045076 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7m2f\" (UniqueName: \"kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.045844 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.046115 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.060898 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-g7m2f\" (UniqueName: \"kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f\") pod \"redhat-operators-h9mq5\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.085501 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.247717 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.277799 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerID="75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750" exitCode=0 Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.277875 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerDied","Data":"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750"} Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.286753 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.288079 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.288092 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz" event={"ID":"a731ee04-6aba-49d9-b8b0-392d31d55da2","Type":"ContainerDied","Data":"6c2590f2e33bfdf719558afa7696c68fbbe9aca2f905913db707bed0c64a8fe2"} Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.288126 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c2590f2e33bfdf719558afa7696c68fbbe9aca2f905913db707bed0c64a8fe2" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.289942 4661 generic.go:334] "Generic (PLEG): container finished" podID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerID="59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720" exitCode=0 Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.290514 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerDied","Data":"59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720"} Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.290541 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerStarted","Data":"891f14a08eea091a2c7f6d2ad3a5b00b1c0d5cd36bfc3daf8bb787f6b125f5a5"} Oct 01 05:31:44 crc kubenswrapper[4661]: W1001 05:31:44.313653 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb7ff42d_9b9d_479f_966d_768148f27414.slice/crio-7fe3fabde691c931186b6aac164bac37329e875876914866c4e5256a311c2b1a WatchSource:0}: Error finding container 7fe3fabde691c931186b6aac164bac37329e875876914866c4e5256a311c2b1a: Status 404 returned error can't find the container with id 
7fe3fabde691c931186b6aac164bac37329e875876914866c4e5256a311c2b1a Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.444508 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-knw9f" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.687295 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:44 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:44 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:44 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.687347 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.726349 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.780065 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:31:44 crc kubenswrapper[4661]: W1001 05:31:44.827519 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddab3196_9370_4574_8164_5aa6fe68f83a.slice/crio-cd6b9cec5c30bd39b01f526ca1b26aa6a097ab745d3cbe4f5d8e156e3e945c71 WatchSource:0}: Error finding container cd6b9cec5c30bd39b01f526ca1b26aa6a097ab745d3cbe4f5d8e156e3e945c71: Status 404 returned error can't find the container with id cd6b9cec5c30bd39b01f526ca1b26aa6a097ab745d3cbe4f5d8e156e3e945c71 Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.854488 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access\") pod \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.854527 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir\") pod \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\" (UID: \"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46\") " Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.854988 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" (UID: "b751b4a7-28bf-4a5b-a624-7a0d69ef9f46"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.865148 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" (UID: "b751b4a7-28bf-4a5b-a624-7a0d69ef9f46"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.906536 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 05:31:44 crc kubenswrapper[4661]: E1001 05:31:44.906846 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" containerName="pruner" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.906856 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" containerName="pruner" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.906954 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b751b4a7-28bf-4a5b-a624-7a0d69ef9f46" containerName="pruner" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.907350 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.908665 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.908747 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.917675 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.956735 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:44 crc kubenswrapper[4661]: I1001 05:31:44.957027 4661 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b751b4a7-28bf-4a5b-a624-7a0d69ef9f46-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.062763 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.062823 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.163662 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.163710 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: 
\"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.163801 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.192874 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.241493 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.341091 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b751b4a7-28bf-4a5b-a624-7a0d69ef9f46","Type":"ContainerDied","Data":"6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e"} Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.341131 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d164c78607d847f78f8dc28b6abe626067c9b6ac7f55357fe00d706bd201f4e" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.341185 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.372219 4661 generic.go:334] "Generic (PLEG): container finished" podID="db7ff42d-9b9d-479f-966d-768148f27414" containerID="3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc" exitCode=0 Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.372463 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerDied","Data":"3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc"} Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.372517 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerStarted","Data":"7fe3fabde691c931186b6aac164bac37329e875876914866c4e5256a311c2b1a"} Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.376989 4661 generic.go:334] "Generic (PLEG): container finished" podID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerID="91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d" exitCode=0 Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.377045 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerDied","Data":"91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d"} Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.377085 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" 
event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerStarted","Data":"cd6b9cec5c30bd39b01f526ca1b26aa6a097ab745d3cbe4f5d8e156e3e945c71"} Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.667805 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.695764 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:45 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:45 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:45 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:45 crc kubenswrapper[4661]: I1001 05:31:45.695842 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:46 crc kubenswrapper[4661]: I1001 05:31:46.166135 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7mdcj" Oct 01 05:31:46 crc kubenswrapper[4661]: I1001 05:31:46.384591 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"85a71b20-a1b6-4ad2-81e4-117a51dda7f2","Type":"ContainerStarted","Data":"35e849fbf5ce55cdf7f065f02bf79fd73a501069a7d9a25ac290c61dd3ca575f"} Oct 01 05:31:46 crc kubenswrapper[4661]: I1001 05:31:46.684010 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:46 crc kubenswrapper[4661]: [-]has-synced failed: reason withheld Oct 01 05:31:46 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:46 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:46 crc kubenswrapper[4661]: I1001 05:31:46.684077 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:47 crc kubenswrapper[4661]: I1001 05:31:47.392848 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"85a71b20-a1b6-4ad2-81e4-117a51dda7f2","Type":"ContainerStarted","Data":"dbb8a8ecbabe840fcfcb1fc9f5d6322e00c4955cd5a87f2c1bb49c03fd2bd9fa"} Oct 01 05:31:47 crc kubenswrapper[4661]: I1001 05:31:47.406475 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.406460654 podStartE2EDuration="3.406460654s" podCreationTimestamp="2025-10-01 05:31:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:31:47.40448234 +0000 UTC m=+156.342460954" watchObservedRunningTime="2025-10-01 05:31:47.406460654 +0000 UTC m=+156.344439268" Oct 01 05:31:47 crc kubenswrapper[4661]: I1001 05:31:47.683322 4661 patch_prober.go:28] interesting pod/router-default-5444994796-shn64 container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 05:31:47 crc kubenswrapper[4661]: [+]has-synced ok Oct 01 05:31:47 crc kubenswrapper[4661]: [+]process-running ok Oct 01 05:31:47 crc kubenswrapper[4661]: healthz check failed Oct 01 05:31:47 crc kubenswrapper[4661]: I1001 05:31:47.683607 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-shn64" podUID="7da01014-205a-4c43-8640-653fd3b65c0a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 05:31:48 crc kubenswrapper[4661]: I1001 05:31:48.402394 4661 generic.go:334] "Generic (PLEG): container finished" podID="85a71b20-a1b6-4ad2-81e4-117a51dda7f2" containerID="dbb8a8ecbabe840fcfcb1fc9f5d6322e00c4955cd5a87f2c1bb49c03fd2bd9fa" exitCode=0 Oct 01 05:31:48 crc kubenswrapper[4661]: I1001 05:31:48.402432 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"85a71b20-a1b6-4ad2-81e4-117a51dda7f2","Type":"ContainerDied","Data":"dbb8a8ecbabe840fcfcb1fc9f5d6322e00c4955cd5a87f2c1bb49c03fd2bd9fa"} Oct 01 05:31:48 crc kubenswrapper[4661]: I1001 05:31:48.688444 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:48 crc kubenswrapper[4661]: I1001 05:31:48.693221 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-shn64" Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.679562 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.836760 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access\") pod \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.836814 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir\") pod \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\" (UID: \"85a71b20-a1b6-4ad2-81e4-117a51dda7f2\") " Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.837047 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "85a71b20-a1b6-4ad2-81e4-117a51dda7f2" (UID: "85a71b20-a1b6-4ad2-81e4-117a51dda7f2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.872819 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "85a71b20-a1b6-4ad2-81e4-117a51dda7f2" (UID: "85a71b20-a1b6-4ad2-81e4-117a51dda7f2"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.938135 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:49 crc kubenswrapper[4661]: I1001 05:31:49.938164 4661 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/85a71b20-a1b6-4ad2-81e4-117a51dda7f2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 05:31:50 crc kubenswrapper[4661]: I1001 05:31:50.417722 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"85a71b20-a1b6-4ad2-81e4-117a51dda7f2","Type":"ContainerDied","Data":"35e849fbf5ce55cdf7f065f02bf79fd73a501069a7d9a25ac290c61dd3ca575f"} Oct 01 05:31:50 crc kubenswrapper[4661]: I1001 05:31:50.417986 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35e849fbf5ce55cdf7f065f02bf79fd73a501069a7d9a25ac290c61dd3ca575f" Oct 01 05:31:50 crc kubenswrapper[4661]: I1001 05:31:50.417787 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.623978 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.632875 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.683876 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.683970 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.684040 4661 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nj65 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Oct 01 05:31:53 crc kubenswrapper[4661]: I1001 05:31:53.684103 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nj65" podUID="5d412a34-f608-4b2d-8485-197efa42d0f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.27:8080/\": dial tcp 10.217.0.27:8080: connect: connection refused" Oct 01 05:31:56 crc kubenswrapper[4661]: I1001 05:31:56.066759 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:56 crc kubenswrapper[4661]: I1001 05:31:56.072212 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f05366d-d4ea-4cf0-b2cf-3a787dca8115-metrics-certs\") pod \"network-metrics-daemon-rsrzg\" (UID: \"6f05366d-d4ea-4cf0-b2cf-3a787dca8115\") " pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:31:56 crc kubenswrapper[4661]: I1001 05:31:56.100058 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rsrzg" Oct 01 05:32:01 crc kubenswrapper[4661]: I1001 05:32:01.151217 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:32:03 crc kubenswrapper[4661]: I1001 05:32:03.693737 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2nj65" Oct 01 05:32:04 crc kubenswrapper[4661]: I1001 05:32:04.309307 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:32:04 crc kubenswrapper[4661]: I1001 05:32:04.309385 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:32:14 crc kubenswrapper[4661]: I1001 05:32:14.032806 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5mmdv" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.703673 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.704584 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cc7sv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dtjdw_openshift-marketplace(6e6c3210-02ed-40f9-8e61-2a0ba0141ba7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.706012 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dtjdw" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.745329 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.745534 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v4pvk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-87grd_openshift-marketplace(d9e824d9-0601-424c-8e2a-e9eb8a958086): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:16 crc kubenswrapper[4661]: E1001 05:32:16.746702 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-87grd" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" Oct 01 05:32:17 crc kubenswrapper[4661]: E1001 05:32:17.278765 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 05:32:17 crc kubenswrapper[4661]: E1001 05:32:17.279159 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldsxt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-fkg8j_openshift-marketplace(7984959d-6d47-4013-ad65-5f28f3c52a12): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:17 crc kubenswrapper[4661]: E1001 05:32:17.281066 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-fkg8j" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" Oct 01 05:32:19 crc kubenswrapper[4661]: I1001 05:32:19.293743 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.317183 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dtjdw" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.318321 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-87grd" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.318541 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-fkg8j" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.526082 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.526568 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b4htr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gn77p_openshift-marketplace(b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:20 crc kubenswrapper[4661]: E1001 05:32:20.527925 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gn77p" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.657801 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gn77p" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.714483 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.714656 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xfpc4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-nwqhx_openshift-marketplace(0f018e51-8c87-444b-9d63-3dadae9cd246): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.715829 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-nwqhx" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.834306 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.834434 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xbv4g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-x4zzv_openshift-marketplace(2f940144-59ee-4af8-85a3-34f55b975463): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:32:21 crc kubenswrapper[4661]: E1001 05:32:21.835570 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-x4zzv" podUID="2f940144-59ee-4af8-85a3-34f55b975463" Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.203454 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rsrzg"] Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.619236 4661 generic.go:334] "Generic (PLEG): container finished" podID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerID="0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07" exitCode=0 Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.619319 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerDied","Data":"0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07"} Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.628780 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" event={"ID":"6f05366d-d4ea-4cf0-b2cf-3a787dca8115","Type":"ContainerStarted","Data":"7f1dc6c2b3fec481cc463551f40a806e6f467c4509604429af8eede407386619"} Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.628829 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" event={"ID":"6f05366d-d4ea-4cf0-b2cf-3a787dca8115","Type":"ContainerStarted","Data":"45433f815a35d4bf0de3271599f4bcc25b7e7b074cb65cd0fd4ce47fd2b72631"} Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.632369 4661 generic.go:334] "Generic (PLEG): container finished" podID="db7ff42d-9b9d-479f-966d-768148f27414" 
containerID="997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1" exitCode=0 Oct 01 05:32:22 crc kubenswrapper[4661]: I1001 05:32:22.632506 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerDied","Data":"997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1"} Oct 01 05:32:22 crc kubenswrapper[4661]: E1001 05:32:22.634433 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-nwqhx" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" Oct 01 05:32:22 crc kubenswrapper[4661]: E1001 05:32:22.637785 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-x4zzv" podUID="2f940144-59ee-4af8-85a3-34f55b975463" Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.643748 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerStarted","Data":"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0"} Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.647020 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerStarted","Data":"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80"} Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.650532 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rsrzg" event={"ID":"6f05366d-d4ea-4cf0-b2cf-3a787dca8115","Type":"ContainerStarted","Data":"25759cddeb1e8b3b9f042d2ea0eca933dc42fc40d3eb587af083b67a894c7e58"} Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.673790 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s4skm" podStartSLOduration=2.7961317980000002 podStartE2EDuration="40.673768021s" podCreationTimestamp="2025-10-01 05:31:43 +0000 UTC" firstStartedPulling="2025-10-01 05:31:45.375071698 +0000 UTC m=+154.313050312" lastFinishedPulling="2025-10-01 05:32:23.252707921 +0000 UTC m=+192.190686535" observedRunningTime="2025-10-01 05:32:23.66742967 +0000 UTC m=+192.605408324" watchObservedRunningTime="2025-10-01 05:32:23.673768021 +0000 UTC m=+192.611746645" Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.703939 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rsrzg" podStartSLOduration=171.70391387 podStartE2EDuration="2m51.70391387s" podCreationTimestamp="2025-10-01 05:29:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:32:23.700907938 +0000 UTC m=+192.638886582" watchObservedRunningTime="2025-10-01 05:32:23.70391387 +0000 UTC m=+192.641892514" Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.738421 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-h9mq5" podStartSLOduration=3.04558638 podStartE2EDuration="40.738400156s" podCreationTimestamp="2025-10-01 05:31:43 +0000 UTC" firstStartedPulling="2025-10-01 05:31:45.378550112 +0000 UTC m=+154.316528726" lastFinishedPulling="2025-10-01 05:32:23.071363858 +0000 UTC m=+192.009342502" observedRunningTime="2025-10-01 05:32:23.733821972 +0000 UTC m=+192.671800616" watchObservedRunningTime="2025-10-01 05:32:23.738400156 +0000 UTC m=+192.676378780" Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.852473 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:32:23 crc kubenswrapper[4661]: I1001 05:32:23.852810 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:32:24 crc kubenswrapper[4661]: I1001 05:32:24.248195 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:24 crc kubenswrapper[4661]: I1001 05:32:24.248247 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:24 crc kubenswrapper[4661]: I1001 05:32:24.987982 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s4skm" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="registry-server" probeResult="failure" output=< Oct 01 05:32:24 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 05:32:24 crc kubenswrapper[4661]: > Oct 01 05:32:25 crc kubenswrapper[4661]: I1001 05:32:25.287251 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h9mq5" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="registry-server" probeResult="failure" output=< Oct 01 05:32:25 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 05:32:25 crc kubenswrapper[4661]: > Oct 01 05:32:33 crc kubenswrapper[4661]: I1001 05:32:33.926018 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:32:33 crc kubenswrapper[4661]: I1001 05:32:33.986338 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.309484 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.310029 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.310292 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.311902 4661 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.313973 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb" gracePeriod=600 Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.314609 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:34 crc kubenswrapper[4661]: I1001 05:32:34.370281 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:35 crc kubenswrapper[4661]: I1001 05:32:35.205753 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:32:35 crc kubenswrapper[4661]: I1001 05:32:35.731198 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb" exitCode=0 Oct 01 05:32:35 crc kubenswrapper[4661]: I1001 05:32:35.731314 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb"} Oct 01 05:32:35 crc kubenswrapper[4661]: I1001 05:32:35.731429 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h9mq5" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="registry-server" containerID="cri-o://139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80" gracePeriod=2 Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.704292 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.739276 4661 generic.go:334] "Generic (PLEG): container finished" podID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerID="139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80" exitCode=0 Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.739327 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerDied","Data":"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80"} Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.739360 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9mq5" event={"ID":"ddab3196-9370-4574-8164-5aa6fe68f83a","Type":"ContainerDied","Data":"cd6b9cec5c30bd39b01f526ca1b26aa6a097ab745d3cbe4f5d8e156e3e945c71"} Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.739367 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h9mq5" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.739382 4661 scope.go:117] "RemoveContainer" containerID="139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.759601 4661 scope.go:117] "RemoveContainer" containerID="0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.800056 4661 scope.go:117] "RemoveContainer" containerID="91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.816752 4661 scope.go:117] "RemoveContainer" containerID="139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80" Oct 01 05:32:36 crc kubenswrapper[4661]: E1001 05:32:36.817328 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80\": container with ID starting with 139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80 not found: ID does not exist" containerID="139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.817381 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80"} err="failed to get container status \"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80\": rpc error: code = NotFound desc = could not find container \"139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80\": container with ID starting with 139d8cb207bee2680890cc416be6f1254a102064194836811e029d5153e35a80 not found: ID does not exist" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.817413 4661 scope.go:117] "RemoveContainer" containerID="0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07" Oct 01 05:32:36 crc kubenswrapper[4661]: E1001 05:32:36.817720 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07\": container with ID starting with 0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07 not found: ID does not exist" containerID="0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.817752 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07"} err="failed to get container status \"0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07\": rpc error: code = NotFound desc = could not find container \"0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07\": container with ID starting with 0b66a99f4960876fd279fcb81c8cfc8d939bedf027fe900c86dfb9a54dc9dc07 not found: ID does not exist" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.817769 4661 scope.go:117] "RemoveContainer" containerID="91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d" Oct 01 05:32:36 crc kubenswrapper[4661]: E1001 05:32:36.818220 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d\": container with ID starting 
with 91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d not found: ID does not exist" containerID="91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.818253 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d"} err="failed to get container status \"91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d\": rpc error: code = NotFound desc = could not find container \"91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d\": container with ID starting with 91ec6c172eca60d1b6bcf8e0690c6dc67b518409120f2650c028c5b1ec95cb6d not found: ID does not exist" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.882748 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7m2f\" (UniqueName: \"kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f\") pod \"ddab3196-9370-4574-8164-5aa6fe68f83a\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.882807 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content\") pod \"ddab3196-9370-4574-8164-5aa6fe68f83a\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.882844 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities\") pod \"ddab3196-9370-4574-8164-5aa6fe68f83a\" (UID: \"ddab3196-9370-4574-8164-5aa6fe68f83a\") " Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.885488 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities" (OuterVolumeSpecName: "utilities") pod "ddab3196-9370-4574-8164-5aa6fe68f83a" (UID: "ddab3196-9370-4574-8164-5aa6fe68f83a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.889351 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f" (OuterVolumeSpecName: "kube-api-access-g7m2f") pod "ddab3196-9370-4574-8164-5aa6fe68f83a" (UID: "ddab3196-9370-4574-8164-5aa6fe68f83a"). InnerVolumeSpecName "kube-api-access-g7m2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.972468 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddab3196-9370-4574-8164-5aa6fe68f83a" (UID: "ddab3196-9370-4574-8164-5aa6fe68f83a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.984974 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7m2f\" (UniqueName: \"kubernetes.io/projected/ddab3196-9370-4574-8164-5aa6fe68f83a-kube-api-access-g7m2f\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.985479 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:36 crc kubenswrapper[4661]: I1001 05:32:36.985496 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddab3196-9370-4574-8164-5aa6fe68f83a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.349788 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.353563 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h9mq5"] Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.747612 4661 generic.go:334] "Generic (PLEG): container finished" podID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerID="6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7" exitCode=0 Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.747797 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerDied","Data":"6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7"} Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.751651 4661 generic.go:334] "Generic (PLEG): container finished" podID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerID="b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50" exitCode=0 Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.751714 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerDied","Data":"b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50"} Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.754572 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerID="d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27" exitCode=0 Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.754697 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerDied","Data":"d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27"} Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.767819 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" path="/var/lib/kubelet/pods/ddab3196-9370-4574-8164-5aa6fe68f83a/volumes" Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.769132 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerStarted","Data":"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c"} Oct 01 05:32:37 crc kubenswrapper[4661]: 
I1001 05:32:37.772999 4661 generic.go:334] "Generic (PLEG): container finished" podID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerID="a5e836c818ddf9291c9a2972738fd569913752dfb6349b32fdfc3aaafa6ac9c9" exitCode=0
Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.773157 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerDied","Data":"a5e836c818ddf9291c9a2972738fd569913752dfb6349b32fdfc3aaafa6ac9c9"}
Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.796106 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38"}
Oct 01 05:32:37 crc kubenswrapper[4661]: I1001 05:32:37.803675 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerStarted","Data":"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.811798 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerStarted","Data":"2e725190dddcf8ec5d2fd0f021fc3ed49797c1c6de11829a3c5ef5d01325b20d"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.813454 4661 generic.go:334] "Generic (PLEG): container finished" podID="2f940144-59ee-4af8-85a3-34f55b975463" containerID="078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605" exitCode=0
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.813492 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerDied","Data":"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.817153 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerStarted","Data":"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.819263 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerStarted","Data":"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.822271 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerStarted","Data":"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.824388 4661 generic.go:334] "Generic (PLEG): container finished" podID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerID="8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c" exitCode=0
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.824845 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerDied","Data":"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c"}
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.832221 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-87grd" podStartSLOduration=2.5790877229999998 podStartE2EDuration="58.832207567s" podCreationTimestamp="2025-10-01 05:31:40 +0000 UTC" firstStartedPulling="2025-10-01 05:31:42.210560632 +0000 UTC m=+151.148539246" lastFinishedPulling="2025-10-01 05:32:38.463680476 +0000 UTC m=+207.401659090" observedRunningTime="2025-10-01 05:32:38.8312014 +0000 UTC m=+207.769180024" watchObservedRunningTime="2025-10-01 05:32:38.832207567 +0000 UTC m=+207.770186181"
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.870269 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gn77p" podStartSLOduration=2.81694272 podStartE2EDuration="56.870246021s" podCreationTimestamp="2025-10-01 05:31:42 +0000 UTC" firstStartedPulling="2025-10-01 05:31:44.281618865 +0000 UTC m=+153.219597479" lastFinishedPulling="2025-10-01 05:32:38.334922136 +0000 UTC m=+207.272900780" observedRunningTime="2025-10-01 05:32:38.86512544 +0000 UTC m=+207.803104044" watchObservedRunningTime="2025-10-01 05:32:38.870246021 +0000 UTC m=+207.808224675"
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.887993 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dtjdw" podStartSLOduration=2.773245212 podStartE2EDuration="58.887972396s" podCreationTimestamp="2025-10-01 05:31:40 +0000 UTC" firstStartedPulling="2025-10-01 05:31:42.224584962 +0000 UTC m=+151.162563576" lastFinishedPulling="2025-10-01 05:32:38.339312116 +0000 UTC m=+207.277290760" observedRunningTime="2025-10-01 05:32:38.883952346 +0000 UTC m=+207.821930960" watchObservedRunningTime="2025-10-01 05:32:38.887972396 +0000 UTC m=+207.825951010"
Oct 01 05:32:38 crc kubenswrapper[4661]: I1001 05:32:38.902939 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fkg8j" podStartSLOduration=2.944290906 podStartE2EDuration="56.902921907s" podCreationTimestamp="2025-10-01 05:31:42 +0000 UTC" firstStartedPulling="2025-10-01 05:31:44.299406927 +0000 UTC m=+153.237385531" lastFinishedPulling="2025-10-01 05:32:38.258037898 +0000 UTC m=+207.196016532" observedRunningTime="2025-10-01 05:32:38.900436788 +0000 UTC m=+207.838415392" watchObservedRunningTime="2025-10-01 05:32:38.902921907 +0000 UTC m=+207.840900521"
Oct 01 05:32:39 crc kubenswrapper[4661]: I1001 05:32:39.831485 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerStarted","Data":"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590"}
Oct 01 05:32:39 crc kubenswrapper[4661]: I1001 05:32:39.833615 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerStarted","Data":"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08"}
Oct 01 05:32:39 crc kubenswrapper[4661]: I1001 05:32:39.851394 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nwqhx" podStartSLOduration=2.859052177 podStartE2EDuration="59.851378906s" podCreationTimestamp="2025-10-01 05:31:40 +0000 UTC" firstStartedPulling="2025-10-01 05:31:42.255974435 +0000 UTC m=+151.193953049" lastFinishedPulling="2025-10-01 05:32:39.248301164 +0000 UTC m=+208.186279778" observedRunningTime="2025-10-01 05:32:39.850551163 +0000 UTC m=+208.788529777" watchObservedRunningTime="2025-10-01 05:32:39.851378906 +0000 UTC m=+208.789357520"
Oct 01 05:32:39 crc kubenswrapper[4661]: I1001 05:32:39.869948 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x4zzv" podStartSLOduration=2.8645758690000003 podStartE2EDuration="59.869930865s" podCreationTimestamp="2025-10-01 05:31:40 +0000 UTC" firstStartedPulling="2025-10-01 05:31:42.217026648 +0000 UTC m=+151.155005262" lastFinishedPulling="2025-10-01 05:32:39.222381644 +0000 UTC m=+208.160360258" observedRunningTime="2025-10-01 05:32:39.865419471 +0000 UTC m=+208.803398085" watchObservedRunningTime="2025-10-01 05:32:39.869930865 +0000 UTC m=+208.807909479"
Oct 01 05:32:40 crc kubenswrapper[4661]: I1001 05:32:40.854673 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dtjdw"
Oct 01 05:32:40 crc kubenswrapper[4661]: I1001 05:32:40.854902 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dtjdw"
Oct 01 05:32:40 crc kubenswrapper[4661]: I1001 05:32:40.903785 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dtjdw"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.273159 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.273302 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.340685 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-87grd"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.640332 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nwqhx"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.642463 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nwqhx"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.680281 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nwqhx"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.724625 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x4zzv"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.724869 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x4zzv"
Oct 01 05:32:41 crc kubenswrapper[4661]: I1001 05:32:41.766831 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x4zzv"
Oct 01 05:32:42 crc kubenswrapper[4661]: I1001 05:32:42.705469 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"]
Oct 01 05:32:42 crc kubenswrapper[4661]: I1001 05:32:42.842207 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gn77p"
Oct 01 05:32:42 crc kubenswrapper[4661]: I1001 05:32:42.842496 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gn77p"
Oct 01 05:32:42 crc kubenswrapper[4661]: I1001 05:32:42.889599 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gn77p"
Oct 01 05:32:43 crc kubenswrapper[4661]: I1001 05:32:43.536783 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fkg8j"
Oct 01 05:32:43 crc kubenswrapper[4661]: I1001 05:32:43.536843 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fkg8j"
Oct 01 05:32:43 crc kubenswrapper[4661]: I1001 05:32:43.575653 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fkg8j"
Oct 01 05:32:43 crc kubenswrapper[4661]: I1001 05:32:43.926064 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fkg8j"
Oct 01 05:32:45 crc kubenswrapper[4661]: I1001 05:32:45.601108 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"]
Oct 01 05:32:45 crc kubenswrapper[4661]: I1001 05:32:45.877154 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fkg8j" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="registry-server" containerID="cri-o://ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7" gracePeriod=2
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.241891 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fkg8j"
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.407927 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities\") pod \"7984959d-6d47-4013-ad65-5f28f3c52a12\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") "
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.408101 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content\") pod \"7984959d-6d47-4013-ad65-5f28f3c52a12\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") "
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.408159 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldsxt\" (UniqueName: \"kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt\") pod \"7984959d-6d47-4013-ad65-5f28f3c52a12\" (UID: \"7984959d-6d47-4013-ad65-5f28f3c52a12\") "
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.409692 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities" (OuterVolumeSpecName: "utilities") pod "7984959d-6d47-4013-ad65-5f28f3c52a12" (UID: "7984959d-6d47-4013-ad65-5f28f3c52a12"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.416900 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt" (OuterVolumeSpecName: "kube-api-access-ldsxt") pod "7984959d-6d47-4013-ad65-5f28f3c52a12" (UID: "7984959d-6d47-4013-ad65-5f28f3c52a12"). InnerVolumeSpecName "kube-api-access-ldsxt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.434088 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7984959d-6d47-4013-ad65-5f28f3c52a12" (UID: "7984959d-6d47-4013-ad65-5f28f3c52a12"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.509449 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.509494 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldsxt\" (UniqueName: \"kubernetes.io/projected/7984959d-6d47-4013-ad65-5f28f3c52a12-kube-api-access-ldsxt\") on node \"crc\" DevicePath \"\""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.509510 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7984959d-6d47-4013-ad65-5f28f3c52a12-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.883581 4661 generic.go:334] "Generic (PLEG): container finished" podID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerID="ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7" exitCode=0
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.883649 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerDied","Data":"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7"}
Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.883688 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fkg8j" event={"ID":"7984959d-6d47-4013-ad65-5f28f3c52a12","Type":"ContainerDied","Data":"891f14a08eea091a2c7f6d2ad3a5b00b1c0d5cd36bfc3daf8bb787f6b125f5a5"}
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fkg8j" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.883710 4661 scope.go:117] "RemoveContainer" containerID="ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.897214 4661 scope.go:117] "RemoveContainer" containerID="b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.914699 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"] Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.917224 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fkg8j"] Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.924726 4661 scope.go:117] "RemoveContainer" containerID="59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.942802 4661 scope.go:117] "RemoveContainer" containerID="ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7" Oct 01 05:32:46 crc kubenswrapper[4661]: E1001 05:32:46.943135 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7\": container with ID starting with ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7 not found: ID does not exist" containerID="ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.943162 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7"} err="failed to get container status \"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7\": rpc error: code = NotFound desc = could not find container \"ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7\": container with ID starting with ee8f78f9adc8ae3eca826bbfd04f2b77e2f41ef1fa7443f8fd3a98f616416ae7 not found: ID does not exist" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.943187 4661 scope.go:117] "RemoveContainer" containerID="b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50" Oct 01 05:32:46 crc kubenswrapper[4661]: E1001 05:32:46.944091 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50\": container with ID starting with b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50 not found: ID does not exist" containerID="b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.944116 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50"} err="failed to get container status \"b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50\": rpc error: code = NotFound desc = could not find container \"b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50\": container with ID starting with b6e94810a1eb5e5c8c54068101a4984f2d4dedc90e20c7470484a01f4c1a1a50 not found: ID does not exist" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.944157 4661 scope.go:117] "RemoveContainer" 
containerID="59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720" Oct 01 05:32:46 crc kubenswrapper[4661]: E1001 05:32:46.944494 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720\": container with ID starting with 59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720 not found: ID does not exist" containerID="59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720" Oct 01 05:32:46 crc kubenswrapper[4661]: I1001 05:32:46.944522 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720"} err="failed to get container status \"59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720\": rpc error: code = NotFound desc = could not find container \"59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720\": container with ID starting with 59965df2e48962788036426a03cb8e4050b72cec4c3f77ce5df028c11ffab720 not found: ID does not exist" Oct 01 05:32:47 crc kubenswrapper[4661]: I1001 05:32:47.763481 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" path="/var/lib/kubelet/pods/7984959d-6d47-4013-ad65-5f28f3c52a12/volumes" Oct 01 05:32:50 crc kubenswrapper[4661]: I1001 05:32:50.896323 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:32:51 crc kubenswrapper[4661]: I1001 05:32:51.304547 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:32:51 crc kubenswrapper[4661]: I1001 05:32:51.441409 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-87grd"] Oct 01 05:32:51 crc kubenswrapper[4661]: I1001 05:32:51.678561 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:32:51 crc kubenswrapper[4661]: I1001 05:32:51.763484 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:32:51 crc kubenswrapper[4661]: I1001 05:32:51.912118 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-87grd" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="registry-server" containerID="cri-o://2e725190dddcf8ec5d2fd0f021fc3ed49797c1c6de11829a3c5ef5d01325b20d" gracePeriod=2 Oct 01 05:32:52 crc kubenswrapper[4661]: I1001 05:32:52.880423 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:32:52 crc kubenswrapper[4661]: I1001 05:32:52.922993 4661 generic.go:334] "Generic (PLEG): container finished" podID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerID="2e725190dddcf8ec5d2fd0f021fc3ed49797c1c6de11829a3c5ef5d01325b20d" exitCode=0 Oct 01 05:32:52 crc kubenswrapper[4661]: I1001 05:32:52.923114 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerDied","Data":"2e725190dddcf8ec5d2fd0f021fc3ed49797c1c6de11829a3c5ef5d01325b20d"} Oct 01 05:32:53 crc kubenswrapper[4661]: I1001 05:32:53.930158 4661 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-87grd" event={"ID":"d9e824d9-0601-424c-8e2a-e9eb8a958086","Type":"ContainerDied","Data":"65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323"} Oct 01 05:32:53 crc kubenswrapper[4661]: I1001 05:32:53.930360 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65ec5cae899762601e9e7e6e957d8d091c4359a7635c10acc39c5016bb529323" Oct 01 05:32:53 crc kubenswrapper[4661]: I1001 05:32:53.948147 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.006726 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.006977 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x4zzv" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="registry-server" containerID="cri-o://76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08" gracePeriod=2 Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.106807 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4pvk\" (UniqueName: \"kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk\") pod \"d9e824d9-0601-424c-8e2a-e9eb8a958086\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.106883 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities\") pod \"d9e824d9-0601-424c-8e2a-e9eb8a958086\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.106935 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content\") pod \"d9e824d9-0601-424c-8e2a-e9eb8a958086\" (UID: \"d9e824d9-0601-424c-8e2a-e9eb8a958086\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.107796 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities" (OuterVolumeSpecName: "utilities") pod "d9e824d9-0601-424c-8e2a-e9eb8a958086" (UID: "d9e824d9-0601-424c-8e2a-e9eb8a958086"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.116351 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk" (OuterVolumeSpecName: "kube-api-access-v4pvk") pod "d9e824d9-0601-424c-8e2a-e9eb8a958086" (UID: "d9e824d9-0601-424c-8e2a-e9eb8a958086"). InnerVolumeSpecName "kube-api-access-v4pvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.149173 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9e824d9-0601-424c-8e2a-e9eb8a958086" (UID: "d9e824d9-0601-424c-8e2a-e9eb8a958086"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.208090 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4pvk\" (UniqueName: \"kubernetes.io/projected/d9e824d9-0601-424c-8e2a-e9eb8a958086-kube-api-access-v4pvk\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.208331 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.208340 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e824d9-0601-424c-8e2a-e9eb8a958086-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.339056 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.511996 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content\") pod \"2f940144-59ee-4af8-85a3-34f55b975463\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.512046 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbv4g\" (UniqueName: \"kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g\") pod \"2f940144-59ee-4af8-85a3-34f55b975463\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.512086 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities\") pod \"2f940144-59ee-4af8-85a3-34f55b975463\" (UID: \"2f940144-59ee-4af8-85a3-34f55b975463\") " Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.512942 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities" (OuterVolumeSpecName: "utilities") pod "2f940144-59ee-4af8-85a3-34f55b975463" (UID: "2f940144-59ee-4af8-85a3-34f55b975463"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.515895 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g" (OuterVolumeSpecName: "kube-api-access-xbv4g") pod "2f940144-59ee-4af8-85a3-34f55b975463" (UID: "2f940144-59ee-4af8-85a3-34f55b975463"). InnerVolumeSpecName "kube-api-access-xbv4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.567039 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f940144-59ee-4af8-85a3-34f55b975463" (UID: "2f940144-59ee-4af8-85a3-34f55b975463"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.613223 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.613261 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbv4g\" (UniqueName: \"kubernetes.io/projected/2f940144-59ee-4af8-85a3-34f55b975463-kube-api-access-xbv4g\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.613275 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f940144-59ee-4af8-85a3-34f55b975463-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939255 4661 generic.go:334] "Generic (PLEG): container finished" podID="2f940144-59ee-4af8-85a3-34f55b975463" containerID="76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08" exitCode=0 Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939327 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4zzv" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939360 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-87grd" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939393 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerDied","Data":"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08"} Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939468 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zzv" event={"ID":"2f940144-59ee-4af8-85a3-34f55b975463","Type":"ContainerDied","Data":"138ae26676667f44d9b8be635b9986c155e5b43d884c76086ed0bffa99f2d1a4"} Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.939499 4661 scope.go:117] "RemoveContainer" containerID="76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.961973 4661 scope.go:117] "RemoveContainer" containerID="078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605" Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.971604 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-87grd"] Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.973862 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-87grd"] Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.982792 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:32:54 crc kubenswrapper[4661]: I1001 05:32:54.985209 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x4zzv"] Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.002548 4661 scope.go:117] "RemoveContainer" containerID="a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.014606 4661 scope.go:117] "RemoveContainer" 
containerID="76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08" Oct 01 05:32:55 crc kubenswrapper[4661]: E1001 05:32:55.014965 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08\": container with ID starting with 76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08 not found: ID does not exist" containerID="76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.014999 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08"} err="failed to get container status \"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08\": rpc error: code = NotFound desc = could not find container \"76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08\": container with ID starting with 76964cf0e6da5cd996f7bac556d4652f83052bac8495100665c7158165cc4e08 not found: ID does not exist" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.015023 4661 scope.go:117] "RemoveContainer" containerID="078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605" Oct 01 05:32:55 crc kubenswrapper[4661]: E1001 05:32:55.015284 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605\": container with ID starting with 078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605 not found: ID does not exist" containerID="078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.015312 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605"} err="failed to get container status \"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605\": rpc error: code = NotFound desc = could not find container \"078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605\": container with ID starting with 078bf2b379633eda592fd9b5b2994df21d2646ae0cca441448c2bf7bc3c2e605 not found: ID does not exist" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.015335 4661 scope.go:117] "RemoveContainer" containerID="a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73" Oct 01 05:32:55 crc kubenswrapper[4661]: E1001 05:32:55.015560 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73\": container with ID starting with a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73 not found: ID does not exist" containerID="a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.015577 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73"} err="failed to get container status \"a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73\": rpc error: code = NotFound desc = could not find container \"a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73\": container with ID starting with 
a47ce6337e1d83c3bf23ede780544160d3a4c89837a62297904ce1e30942cc73 not found: ID does not exist" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.763374 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f940144-59ee-4af8-85a3-34f55b975463" path="/var/lib/kubelet/pods/2f940144-59ee-4af8-85a3-34f55b975463/volumes" Oct 01 05:32:55 crc kubenswrapper[4661]: I1001 05:32:55.764059 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" path="/var/lib/kubelet/pods/d9e824d9-0601-424c-8e2a-e9eb8a958086/volumes" Oct 01 05:33:07 crc kubenswrapper[4661]: I1001 05:33:07.735000 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift" containerID="cri-o://71794a8549dea8ba3bfba8026baa29b280daa34c62bbf54339f7a4626ba47f21" gracePeriod=15 Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.015011 4661 generic.go:334] "Generic (PLEG): container finished" podID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerID="71794a8549dea8ba3bfba8026baa29b280daa34c62bbf54339f7a4626ba47f21" exitCode=0 Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.015153 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" event={"ID":"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7","Type":"ContainerDied","Data":"71794a8549dea8ba3bfba8026baa29b280daa34c62bbf54339f7a4626ba47f21"} Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.235846 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.283497 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"] Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.283860 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.283883 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.283900 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.283913 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.283931 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.283944 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="extract-content" Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.283961 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="registry-server" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.283974 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="registry-server" Oct 01 05:33:08 
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.283992 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a71b20-a1b6-4ad2-81e4-117a51dda7f2" containerName="pruner"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284003 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a71b20-a1b6-4ad2-81e4-117a51dda7f2" containerName="pruner"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284020 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284032 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284049 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284060 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284080 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284094 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284110 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284122 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284142 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="extract-content"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284154 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="extract-content"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284174 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284186 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284204 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284215 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284234 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284246 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: E1001 05:33:08.284266 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284279 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="extract-utilities"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284426 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9e824d9-0601-424c-8e2a-e9eb8a958086" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284448 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a71b20-a1b6-4ad2-81e4-117a51dda7f2" containerName="pruner"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284469 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddab3196-9370-4574-8164-5aa6fe68f83a" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284487 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7984959d-6d47-4013-ad65-5f28f3c52a12" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284505 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" containerName="oauth-openshift"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.284521 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f940144-59ee-4af8-85a3-34f55b975463" containerName="registry-server"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.289915 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.300070 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"]
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397187 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397238 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397294 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397316 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
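The cpu_manager/state_mem and memory_manager bursts above are the resource managers dropping per-container assignments for pods that no longer exist, just before the replacement oauth-openshift pod is admitted. The operation amounts to deleting map entries keyed by (podUID, containerName) whose pod is not in the active set; a toy version under that assumption (UIDs are placeholders, and the real managers keep richer state than a string):

```go
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments whose pod is no longer active,
// mirroring the "RemoveStaleState" / "Deleted CPUSet assignment" entries.
func removeStaleState(assignments map[key]string, active map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !active[k.podUID] {
			fmt.Printf("removing container %q of pod %q\n", k.container, k.podUID)
			delete(assignments, k)
		}
	}
}

func main() {
	a := map[key]string{
		{"old-pod-uid", "registry-server"}: "cpus 0-1",
		{"new-pod-uid", "oauth-openshift"}: "cpus 2-3",
	}
	removeStaleState(a, map[string]bool{"new-pod-uid": true})
	fmt.Println(len(a)) // 1: only the active pod's assignment remains
}
```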
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397339 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9zgs\" (UniqueName: \"kubernetes.io/projected/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-kube-api-access-w9zgs\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397365 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397385 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397409 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397451 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397476 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397497 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397513 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397532 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397552 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies\") pod \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\" (UID: \"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7\") "
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397676 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397707 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-cliconfig\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397732 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397762 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397779 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-login\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397802 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-audit-policies\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397826 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-router-certs\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397855 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/12d62df4-a308-477f-9433-8fb8950b8206-audit-dir\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397877 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-serving-cert\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397896 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvpb4\" (UniqueName: \"kubernetes.io/projected/12d62df4-a308-477f-9433-8fb8950b8206-kube-api-access-tvpb4\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397911 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397934 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-error\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397949 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-service-ca\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.397968 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-session\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.398450 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.398508 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.398719 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.399472 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.400393 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.404400 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.404981 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.405622 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.410361 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.417369 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.417377 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.417730 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.418146 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" (UID: "2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499168 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-error\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499255 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-service-ca\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499299 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-session\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499342 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499398 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-cliconfig\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499450 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499513 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499551 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-login\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: 
\"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499598 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-audit-policies\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499705 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-router-certs\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499781 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/12d62df4-a308-477f-9433-8fb8950b8206-audit-dir\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499820 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-serving-cert\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499856 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvpb4\" (UniqueName: \"kubernetes.io/projected/12d62df4-a308-477f-9433-8fb8950b8206-kube-api-access-tvpb4\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499889 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499974 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.499998 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500021 4661 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc 
kubenswrapper[4661]: I1001 05:33:08.500042 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500063 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500083 4661 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500102 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500125 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500146 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500165 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500185 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9zgs\" (UniqueName: \"kubernetes.io/projected/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-kube-api-access-w9zgs\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500205 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500231 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.500258 4661 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.501529 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/12d62df4-a308-477f-9433-8fb8950b8206-audit-dir\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: 
\"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.502301 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-audit-policies\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.502433 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-cliconfig\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.502522 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-service-ca\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.504823 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-router-certs\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.506102 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-session\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.506763 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.507215 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-serving-cert\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.508024 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " 
pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.509103 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-error\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.509846 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.510074 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.510316 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/12d62df4-a308-477f-9433-8fb8950b8206-v4-0-config-user-template-login\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.530457 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvpb4\" (UniqueName: \"kubernetes.io/projected/12d62df4-a308-477f-9433-8fb8950b8206-kube-api-access-tvpb4\") pod \"oauth-openshift-775b8f44b6-ffpfm\" (UID: \"12d62df4-a308-477f-9433-8fb8950b8206\") " pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:08 crc kubenswrapper[4661]: I1001 05:33:08.617247 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.024257 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.024156 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-j9mhf" event={"ID":"2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7","Type":"ContainerDied","Data":"979d010e5c74d6c95143ab3feb6437a85863df434467a891bd61535683e27798"} Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.028945 4661 scope.go:117] "RemoveContainer" containerID="71794a8549dea8ba3bfba8026baa29b280daa34c62bbf54339f7a4626ba47f21" Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.085743 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"] Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.089799 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-j9mhf"] Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.162142 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-775b8f44b6-ffpfm"] Oct 01 05:33:09 crc kubenswrapper[4661]: I1001 05:33:09.773468 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7" path="/var/lib/kubelet/pods/2d2dcf37-c0a9-4cdb-a82a-c157a6609cb7/volumes" Oct 01 05:33:10 crc kubenswrapper[4661]: I1001 05:33:10.032628 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" event={"ID":"12d62df4-a308-477f-9433-8fb8950b8206","Type":"ContainerStarted","Data":"52c37618f7bf7c93dabcdbc30c121c72dc07e60e69ef1ce3deab332b314e97f8"} Oct 01 05:33:10 crc kubenswrapper[4661]: I1001 05:33:10.032711 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" event={"ID":"12d62df4-a308-477f-9433-8fb8950b8206","Type":"ContainerStarted","Data":"e39e8208c0a6e3b526402b49dbb1403ade38624c6f6a0c824e35caef5b31df45"} Oct 01 05:33:10 crc kubenswrapper[4661]: I1001 05:33:10.032942 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:10 crc kubenswrapper[4661]: I1001 05:33:10.038354 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" Oct 01 05:33:10 crc kubenswrapper[4661]: I1001 05:33:10.082548 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-775b8f44b6-ffpfm" podStartSLOduration=28.082522029 podStartE2EDuration="28.082522029s" podCreationTimestamp="2025-10-01 05:32:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:33:10.06470435 +0000 UTC m=+239.002682994" watchObservedRunningTime="2025-10-01 05:33:10.082522029 +0000 UTC m=+239.020500673" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.574279 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.575348 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dtjdw" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="registry-server" 
containerID="cri-o://87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94" gracePeriod=30 Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.587812 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.588189 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nwqhx" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="registry-server" containerID="cri-o://6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590" gracePeriod=30 Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.595517 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.596124 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" containerID="cri-o://ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6" gracePeriod=30 Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.605710 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xpqj9"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.606589 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.611520 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.611821 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gn77p" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="registry-server" containerID="cri-o://c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac" gracePeriod=30 Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.623373 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.623734 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s4skm" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="registry-server" containerID="cri-o://3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0" gracePeriod=30 Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.630702 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xpqj9"] Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.669668 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mksj6\" (UniqueName: \"kubernetes.io/projected/af543e28-92e4-4c71-a1dc-1478f2c25169-kube-api-access-mksj6\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.669751 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.669787 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.773183 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.773236 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.773265 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mksj6\" (UniqueName: \"kubernetes.io/projected/af543e28-92e4-4c71-a1dc-1478f2c25169-kube-api-access-mksj6\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.774644 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.786367 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/af543e28-92e4-4c71-a1dc-1478f2c25169-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.795587 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mksj6\" (UniqueName: \"kubernetes.io/projected/af543e28-92e4-4c71-a1dc-1478f2c25169-kube-api-access-mksj6\") pod \"marketplace-operator-79b997595-xpqj9\" (UID: \"af543e28-92e4-4c71-a1dc-1478f2c25169\") " pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:35 crc kubenswrapper[4661]: I1001 05:33:35.933925 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.026356 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.058028 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.172523 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.178885 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content\") pod \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.178941 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content\") pod \"0f018e51-8c87-444b-9d63-3dadae9cd246\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.179005 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfpc4\" (UniqueName: \"kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4\") pod \"0f018e51-8c87-444b-9d63-3dadae9cd246\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.179062 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities\") pod \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.179114 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities\") pod \"0f018e51-8c87-444b-9d63-3dadae9cd246\" (UID: \"0f018e51-8c87-444b-9d63-3dadae9cd246\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.179162 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc7sv\" (UniqueName: \"kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv\") pod \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\" (UID: \"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.180252 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities" (OuterVolumeSpecName: "utilities") pod "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" (UID: "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.180352 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities" (OuterVolumeSpecName: "utilities") pod "0f018e51-8c87-444b-9d63-3dadae9cd246" (UID: "0f018e51-8c87-444b-9d63-3dadae9cd246"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.202317 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4" (OuterVolumeSpecName: "kube-api-access-xfpc4") pod "0f018e51-8c87-444b-9d63-3dadae9cd246" (UID: "0f018e51-8c87-444b-9d63-3dadae9cd246"). InnerVolumeSpecName "kube-api-access-xfpc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.202865 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv" (OuterVolumeSpecName: "kube-api-access-cc7sv") pod "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" (UID: "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7"). InnerVolumeSpecName "kube-api-access-cc7sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.209189 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xpqj9"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.210023 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.217574 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerID="c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac" exitCode=0 Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.217656 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerDied","Data":"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.217688 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn77p" event={"ID":"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42","Type":"ContainerDied","Data":"9830885f6d91bfa1c904fd510c21de4452ab29888be0965e48fc87d9587272c4"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.217711 4661 scope.go:117] "RemoveContainer" containerID="c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.217863 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn77p" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.218925 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.224186 4661 generic.go:334] "Generic (PLEG): container finished" podID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerID="ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6" exitCode=0 Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.224266 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" event={"ID":"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04","Type":"ContainerDied","Data":"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.224291 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" event={"ID":"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04","Type":"ContainerDied","Data":"3afcf924687c003a44d02e9ff6200755026688019a06b7919c1d613ce9e6986a"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.234919 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" (UID: "6e6c3210-02ed-40f9-8e61-2a0ba0141ba7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.243697 4661 scope.go:117] "RemoveContainer" containerID="d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.244464 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nwqhx" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.244789 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerDied","Data":"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.244622 4661 generic.go:334] "Generic (PLEG): container finished" podID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerID="6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590" exitCode=0 Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.245219 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nwqhx" event={"ID":"0f018e51-8c87-444b-9d63-3dadae9cd246","Type":"ContainerDied","Data":"54068c464d613706400da216d14ae6daf3997a2e3a76ee4efb52af6a6cbfcdc0"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.261905 4661 scope.go:117] "RemoveContainer" containerID="75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.262356 4661 generic.go:334] "Generic (PLEG): container finished" podID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerID="87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94" exitCode=0 Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.262401 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerDied","Data":"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.262620 4661 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dtjdw" event={"ID":"6e6c3210-02ed-40f9-8e61-2a0ba0141ba7","Type":"ContainerDied","Data":"c2f9c5a57fcd6bf4162540c0a6c1df4ff4af757895a1f8560959b36b01860a04"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.262434 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dtjdw" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.266813 4661 generic.go:334] "Generic (PLEG): container finished" podID="db7ff42d-9b9d-479f-966d-768148f27414" containerID="3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0" exitCode=0 Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.268002 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerDied","Data":"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.268047 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4skm" event={"ID":"db7ff42d-9b9d-479f-966d-768148f27414","Type":"ContainerDied","Data":"7fe3fabde691c931186b6aac164bac37329e875876914866c4e5256a311c2b1a"} Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.268109 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4skm" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.273077 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f018e51-8c87-444b-9d63-3dadae9cd246" (UID: "0f018e51-8c87-444b-9d63-3dadae9cd246"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280329 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content\") pod \"db7ff42d-9b9d-479f-966d-768148f27414\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280406 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content\") pod \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280448 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdlfj\" (UniqueName: \"kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj\") pod \"db7ff42d-9b9d-479f-966d-768148f27414\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280491 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4htr\" (UniqueName: \"kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr\") pod \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280511 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities\") pod \"db7ff42d-9b9d-479f-966d-768148f27414\" (UID: \"db7ff42d-9b9d-479f-966d-768148f27414\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280529 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qxmb\" (UniqueName: \"kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb\") pod \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280548 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities\") pod \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\" (UID: \"b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280584 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca\") pod \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280604 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics\") pod \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\" (UID: \"3b56c64d-0bba-4d20-a6ae-e6c9349d0c04\") " Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280828 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc7sv\" (UniqueName: 
\"kubernetes.io/projected/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-kube-api-access-cc7sv\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280840 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280850 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280858 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfpc4\" (UniqueName: \"kubernetes.io/projected/0f018e51-8c87-444b-9d63-3dadae9cd246-kube-api-access-xfpc4\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280869 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.280877 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f018e51-8c87-444b-9d63-3dadae9cd246-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.285911 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr" (OuterVolumeSpecName: "kube-api-access-b4htr") pod "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" (UID: "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42"). InnerVolumeSpecName "kube-api-access-b4htr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.285969 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" (UID: "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.286509 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" (UID: "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.286931 4661 scope.go:117] "RemoveContainer" containerID="c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.287209 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities" (OuterVolumeSpecName: "utilities") pod "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" (UID: "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.287400 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac\": container with ID starting with c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac not found: ID does not exist" containerID="c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.287432 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac"} err="failed to get container status \"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac\": rpc error: code = NotFound desc = could not find container \"c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac\": container with ID starting with c3c123bd00d046e75e998aee240163af57623ae2dddb493adf8f99c48f2b59ac not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.287459 4661 scope.go:117] "RemoveContainer" containerID="d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.287775 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27\": container with ID starting with d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27 not found: ID does not exist" containerID="d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.287794 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27"} err="failed to get container status \"d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27\": rpc error: code = NotFound desc = could not find container \"d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27\": container with ID starting with d874aecf1cc549de77315672b5b56cb3a81ee8aebd87aa2dd0fe204062644c27 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.287978 4661 scope.go:117] "RemoveContainer" containerID="75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.288171 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750\": container with ID starting with 75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750 not found: ID does not exist" containerID="75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.288190 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750"} err="failed to get container status \"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750\": rpc error: code = NotFound desc = could not find container \"75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750\": container with ID starting with 
75145a8ae3c8dfc0b0c5042c6326507ef718006b0fb99658520727a37255f750 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.288202 4661 scope.go:117] "RemoveContainer" containerID="ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.289328 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj" (OuterVolumeSpecName: "kube-api-access-vdlfj") pod "db7ff42d-9b9d-479f-966d-768148f27414" (UID: "db7ff42d-9b9d-479f-966d-768148f27414"). InnerVolumeSpecName "kube-api-access-vdlfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.308705 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.312166 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb" (OuterVolumeSpecName: "kube-api-access-9qxmb") pod "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" (UID: "3b56c64d-0bba-4d20-a6ae-e6c9349d0c04"). InnerVolumeSpecName "kube-api-access-9qxmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.312563 4661 scope.go:117] "RemoveContainer" containerID="ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.313042 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6\": container with ID starting with ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6 not found: ID does not exist" containerID="ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.313078 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6"} err="failed to get container status \"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6\": rpc error: code = NotFound desc = could not find container \"ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6\": container with ID starting with ea408f0819fd82e8baab6f0fe05c700b776828763e946e1d9efa450e2eb536e6 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.313118 4661 scope.go:117] "RemoveContainer" containerID="6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.314386 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dtjdw"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.314494 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities" (OuterVolumeSpecName: "utilities") pod "db7ff42d-9b9d-479f-966d-768148f27414" (UID: "db7ff42d-9b9d-479f-966d-768148f27414"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.317248 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" (UID: "b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.330723 4661 scope.go:117] "RemoveContainer" containerID="8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.349766 4661 scope.go:117] "RemoveContainer" containerID="f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.368562 4661 scope.go:117] "RemoveContainer" containerID="6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.368985 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590\": container with ID starting with 6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590 not found: ID does not exist" containerID="6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369029 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590"} err="failed to get container status \"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590\": rpc error: code = NotFound desc = could not find container \"6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590\": container with ID starting with 6af38de2cb8cd060e8ab296170ea714ac94abc58b39a0ece5a2903641f87b590 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369057 4661 scope.go:117] "RemoveContainer" containerID="8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.369383 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c\": container with ID starting with 8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c not found: ID does not exist" containerID="8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369413 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c"} err="failed to get container status \"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c\": rpc error: code = NotFound desc = could not find container \"8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c\": container with ID starting with 8e1f10dc6ab6d7ea6f16395338cbb21390d5e112593eac650caa815d7013c96c not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369433 4661 scope.go:117] "RemoveContainer" containerID="f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225" Oct 01 05:33:36 crc kubenswrapper[4661]: 
E1001 05:33:36.369773 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225\": container with ID starting with f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225 not found: ID does not exist" containerID="f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369813 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225"} err="failed to get container status \"f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225\": rpc error: code = NotFound desc = could not find container \"f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225\": container with ID starting with f6b39e14b22d17a58a23e556158c808194c889519266769cb539fcf4a5d5e225 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.369840 4661 scope.go:117] "RemoveContainer" containerID="87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382608 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382706 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdlfj\" (UniqueName: \"kubernetes.io/projected/db7ff42d-9b9d-479f-966d-768148f27414-kube-api-access-vdlfj\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382721 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4htr\" (UniqueName: \"kubernetes.io/projected/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-kube-api-access-b4htr\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382732 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382742 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qxmb\" (UniqueName: \"kubernetes.io/projected/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-kube-api-access-9qxmb\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382752 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382764 4661 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.382775 4661 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.387261 4661 scope.go:117] "RemoveContainer" 
containerID="6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.400509 4661 scope.go:117] "RemoveContainer" containerID="ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.403124 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db7ff42d-9b9d-479f-966d-768148f27414" (UID: "db7ff42d-9b9d-479f-966d-768148f27414"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.421568 4661 scope.go:117] "RemoveContainer" containerID="87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.422070 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94\": container with ID starting with 87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94 not found: ID does not exist" containerID="87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422121 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94"} err="failed to get container status \"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94\": rpc error: code = NotFound desc = could not find container \"87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94\": container with ID starting with 87b0259cdd569c022111c6e1317f47167ac3e0965fda66d49994156b3da29d94 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422154 4661 scope.go:117] "RemoveContainer" containerID="6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.422491 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7\": container with ID starting with 6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7 not found: ID does not exist" containerID="6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422524 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7"} err="failed to get container status \"6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7\": rpc error: code = NotFound desc = could not find container \"6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7\": container with ID starting with 6667489d39c72c422c6a3bfa3ee0f2362cf14f16d6129b3e7d07bac5939ee4e7 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422548 4661 scope.go:117] "RemoveContainer" containerID="ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.422900 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6\": container with ID starting with ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6 not found: ID does not exist" containerID="ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422925 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6"} err="failed to get container status \"ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6\": rpc error: code = NotFound desc = could not find container \"ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6\": container with ID starting with ea9b84b3a2ddc5e47fee9092bdb2386144e25aafd0b55ec6eab95c0ea331cca6 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.422940 4661 scope.go:117] "RemoveContainer" containerID="3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.437590 4661 scope.go:117] "RemoveContainer" containerID="997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.451611 4661 scope.go:117] "RemoveContainer" containerID="3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.464332 4661 scope.go:117] "RemoveContainer" containerID="3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.464812 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0\": container with ID starting with 3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0 not found: ID does not exist" containerID="3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.464852 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0"} err="failed to get container status \"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0\": rpc error: code = NotFound desc = could not find container \"3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0\": container with ID starting with 3a115a99ed504ae55e539f9822b49abfd795c43ba6c317413375aa489a64c8f0 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.464880 4661 scope.go:117] "RemoveContainer" containerID="997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.465223 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1\": container with ID starting with 997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1 not found: ID does not exist" containerID="997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.465253 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1"} err="failed to get container status 
\"997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1\": rpc error: code = NotFound desc = could not find container \"997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1\": container with ID starting with 997db8213ef1b7764eb9acebadfac341c2fbeef8c63887b780ee0742edafb4e1 not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.465272 4661 scope.go:117] "RemoveContainer" containerID="3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc" Oct 01 05:33:36 crc kubenswrapper[4661]: E1001 05:33:36.465512 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc\": container with ID starting with 3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc not found: ID does not exist" containerID="3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.465545 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc"} err="failed to get container status \"3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc\": rpc error: code = NotFound desc = could not find container \"3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc\": container with ID starting with 3ceb103f23b7b8c06640305ba6d98e2bde5aeee0e4f514183fc637eee4da2afc not found: ID does not exist" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.484992 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db7ff42d-9b9d-479f-966d-768148f27414-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.546751 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.550349 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn77p"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.582184 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.586573 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nwqhx"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.604329 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:33:36 crc kubenswrapper[4661]: I1001 05:33:36.608302 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s4skm"] Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.274402 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r5ghr" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.281862 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" event={"ID":"af543e28-92e4-4c71-a1dc-1478f2c25169","Type":"ContainerStarted","Data":"cad1b7119cc4a8d751420aa749536ef343d4d1df98c20f65747486798f069ec0"} Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.281940 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" event={"ID":"af543e28-92e4-4c71-a1dc-1478f2c25169","Type":"ContainerStarted","Data":"b58a96ffa83601cf2d3d699adb3bb42e58b1393277bafb79004de12e03e488f6"} Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.282385 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.286453 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.324738 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xpqj9" podStartSLOduration=2.324714329 podStartE2EDuration="2.324714329s" podCreationTimestamp="2025-10-01 05:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:33:37.305105111 +0000 UTC m=+266.243083725" watchObservedRunningTime="2025-10-01 05:33:37.324714329 +0000 UTC m=+266.262692983" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.338481 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"] Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.346365 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r5ghr"] Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.764766 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" path="/var/lib/kubelet/pods/0f018e51-8c87-444b-9d63-3dadae9cd246/volumes" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.766689 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" path="/var/lib/kubelet/pods/3b56c64d-0bba-4d20-a6ae-e6c9349d0c04/volumes" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.767672 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" path="/var/lib/kubelet/pods/6e6c3210-02ed-40f9-8e61-2a0ba0141ba7/volumes" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.768980 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" path="/var/lib/kubelet/pods/b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42/volumes" Oct 01 05:33:37 crc kubenswrapper[4661]: I1001 05:33:37.771207 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db7ff42d-9b9d-479f-966d-768148f27414" path="/var/lib/kubelet/pods/db7ff42d-9b9d-479f-966d-768148f27414/volumes" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.029078 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tc8mt"] Oct 01 05:33:38 crc 
kubenswrapper[4661]: E1001 05:33:38.030257 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030273 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030283 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030289 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030299 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030306 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030315 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030322 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030335 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030342 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030354 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030362 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030370 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030377 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030387 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030414 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030428 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030436 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" 
containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030447 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030454 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030463 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030469 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="extract-content" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030479 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030486 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: E1001 05:33:38.030496 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.030502 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="extract-utilities" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031043 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7ee9fa7-7154-45d6-a6f4-f78cdbaa1c42" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031060 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="db7ff42d-9b9d-479f-966d-768148f27414" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031068 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e6c3210-02ed-40f9-8e61-2a0ba0141ba7" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031076 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b56c64d-0bba-4d20-a6ae-e6c9349d0c04" containerName="marketplace-operator" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031086 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f018e51-8c87-444b-9d63-3dadae9cd246" containerName="registry-server" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.031793 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.033391 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.049570 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tc8mt"] Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.103327 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sg2r\" (UniqueName: \"kubernetes.io/projected/1ec4e027-d0b0-4931-a551-2bfbd8769337-kube-api-access-7sg2r\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.103381 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-utilities\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.103409 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-catalog-content\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.204307 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sg2r\" (UniqueName: \"kubernetes.io/projected/1ec4e027-d0b0-4931-a551-2bfbd8769337-kube-api-access-7sg2r\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.204575 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-utilities\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.204712 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-catalog-content\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.205073 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-utilities\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.205108 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec4e027-d0b0-4931-a551-2bfbd8769337-catalog-content\") pod \"redhat-marketplace-tc8mt\" (UID: 
\"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.222221 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sg2r\" (UniqueName: \"kubernetes.io/projected/1ec4e027-d0b0-4931-a551-2bfbd8769337-kube-api-access-7sg2r\") pod \"redhat-marketplace-tc8mt\" (UID: \"1ec4e027-d0b0-4931-a551-2bfbd8769337\") " pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.387911 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.573921 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tc8mt"] Oct 01 05:33:38 crc kubenswrapper[4661]: W1001 05:33:38.583451 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ec4e027_d0b0_4931_a551_2bfbd8769337.slice/crio-96e64f327e4d56b56d7e61b403a33fc4e4eb84a64b33778dfc3ee5132b978309 WatchSource:0}: Error finding container 96e64f327e4d56b56d7e61b403a33fc4e4eb84a64b33778dfc3ee5132b978309: Status 404 returned error can't find the container with id 96e64f327e4d56b56d7e61b403a33fc4e4eb84a64b33778dfc3ee5132b978309 Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.630535 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l56gs"] Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.631459 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.634865 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.641515 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l56gs"] Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.719945 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-utilities\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.720302 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4knkl\" (UniqueName: \"kubernetes.io/projected/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-kube-api-access-4knkl\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.720326 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-catalog-content\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.821558 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-utilities\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.821612 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4knkl\" (UniqueName: \"kubernetes.io/projected/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-kube-api-access-4knkl\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.821652 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-catalog-content\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.822165 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-catalog-content\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.822190 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-utilities\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.839568 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4knkl\" (UniqueName: \"kubernetes.io/projected/2dccb83e-7a19-4707-b2cc-8c5f68ebc261-kube-api-access-4knkl\") pod \"redhat-operators-l56gs\" (UID: \"2dccb83e-7a19-4707-b2cc-8c5f68ebc261\") " pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:38 crc kubenswrapper[4661]: I1001 05:33:38.956289 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:39 crc kubenswrapper[4661]: I1001 05:33:39.295097 4661 generic.go:334] "Generic (PLEG): container finished" podID="1ec4e027-d0b0-4931-a551-2bfbd8769337" containerID="db2a1a15fad25fec4b42162cb11e261cb76c958fec43702a210dcdedf74fa6fa" exitCode=0 Oct 01 05:33:39 crc kubenswrapper[4661]: I1001 05:33:39.295134 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tc8mt" event={"ID":"1ec4e027-d0b0-4931-a551-2bfbd8769337","Type":"ContainerDied","Data":"db2a1a15fad25fec4b42162cb11e261cb76c958fec43702a210dcdedf74fa6fa"} Oct 01 05:33:39 crc kubenswrapper[4661]: I1001 05:33:39.295509 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tc8mt" event={"ID":"1ec4e027-d0b0-4931-a551-2bfbd8769337","Type":"ContainerStarted","Data":"96e64f327e4d56b56d7e61b403a33fc4e4eb84a64b33778dfc3ee5132b978309"} Oct 01 05:33:39 crc kubenswrapper[4661]: I1001 05:33:39.356989 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l56gs"] Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.302003 4661 generic.go:334] "Generic (PLEG): container finished" podID="1ec4e027-d0b0-4931-a551-2bfbd8769337" containerID="d316e1eff175552b8c6782417e35d6562e3769e4b7f9544bd4420b02cf6d3516" exitCode=0 Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.302426 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tc8mt" event={"ID":"1ec4e027-d0b0-4931-a551-2bfbd8769337","Type":"ContainerDied","Data":"d316e1eff175552b8c6782417e35d6562e3769e4b7f9544bd4420b02cf6d3516"} Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.303447 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dccb83e-7a19-4707-b2cc-8c5f68ebc261" containerID="53d8ddf8050e3abf768fdb0edeaec3d2814acf7d58fa73b65aa21dd8981ff431" exitCode=0 Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.303469 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l56gs" event={"ID":"2dccb83e-7a19-4707-b2cc-8c5f68ebc261","Type":"ContainerDied","Data":"53d8ddf8050e3abf768fdb0edeaec3d2814acf7d58fa73b65aa21dd8981ff431"} Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.303483 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l56gs" event={"ID":"2dccb83e-7a19-4707-b2cc-8c5f68ebc261","Type":"ContainerStarted","Data":"e303be4166f09aa33899e16720ecf94ee5048da572f5c48687a34f8fd94cf016"} Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.428724 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gwwgw"] Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.429666 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.433090 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.441475 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwwgw"] Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.544956 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-utilities\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.544997 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmh8k\" (UniqueName: \"kubernetes.io/projected/de60a77e-3fb7-4777-94de-54b40db66c7a-kube-api-access-cmh8k\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.545052 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-catalog-content\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.646455 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-utilities\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.646717 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmh8k\" (UniqueName: \"kubernetes.io/projected/de60a77e-3fb7-4777-94de-54b40db66c7a-kube-api-access-cmh8k\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.647042 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-catalog-content\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.647071 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-utilities\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.647774 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de60a77e-3fb7-4777-94de-54b40db66c7a-catalog-content\") pod \"certified-operators-gwwgw\" (UID: 
\"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.671384 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmh8k\" (UniqueName: \"kubernetes.io/projected/de60a77e-3fb7-4777-94de-54b40db66c7a-kube-api-access-cmh8k\") pod \"certified-operators-gwwgw\" (UID: \"de60a77e-3fb7-4777-94de-54b40db66c7a\") " pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:40 crc kubenswrapper[4661]: I1001 05:33:40.755188 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.027763 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.029146 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.031439 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.047856 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.160801 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwwgw"] Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.161266 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.161305 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.161334 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7244z\" (UniqueName: \"kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.262990 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.263399 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content\") pod \"community-operators-jkssm\" (UID: 
\"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.263450 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7244z\" (UniqueName: \"kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.263515 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.263766 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.281219 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7244z\" (UniqueName: \"kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z\") pod \"community-operators-jkssm\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.310794 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tc8mt" event={"ID":"1ec4e027-d0b0-4931-a551-2bfbd8769337","Type":"ContainerStarted","Data":"b59f3a808a0c9786b9221b44aea942c123372b21f2ea3ae4aa165a1e497751f4"} Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.312296 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwgw" event={"ID":"de60a77e-3fb7-4777-94de-54b40db66c7a","Type":"ContainerStarted","Data":"a192e67b0c7e79569720e382b1c2e476e5fae3588dee623b7b784202096738a4"} Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.327691 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tc8mt" podStartSLOduration=1.884580992 podStartE2EDuration="3.327672199s" podCreationTimestamp="2025-10-01 05:33:38 +0000 UTC" firstStartedPulling="2025-10-01 05:33:39.297203609 +0000 UTC m=+268.235182263" lastFinishedPulling="2025-10-01 05:33:40.740294856 +0000 UTC m=+269.678273470" observedRunningTime="2025-10-01 05:33:41.324153479 +0000 UTC m=+270.262132093" watchObservedRunningTime="2025-10-01 05:33:41.327672199 +0000 UTC m=+270.265650813" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.364651 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:41 crc kubenswrapper[4661]: I1001 05:33:41.753791 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.319472 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dccb83e-7a19-4707-b2cc-8c5f68ebc261" containerID="895d284862d4e5ec6f1a4113fcb83f7b2fb383333f53f3b14669eb85ea49d175" exitCode=0 Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.319524 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l56gs" event={"ID":"2dccb83e-7a19-4707-b2cc-8c5f68ebc261","Type":"ContainerDied","Data":"895d284862d4e5ec6f1a4113fcb83f7b2fb383333f53f3b14669eb85ea49d175"} Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.321301 4661 generic.go:334] "Generic (PLEG): container finished" podID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerID="94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891" exitCode=0 Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.321388 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerDied","Data":"94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891"} Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.321434 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerStarted","Data":"aaca2fd53067a56e23127c1686deab7595163aa2b7c504cfc5d6a697731aa1d5"} Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.324692 4661 generic.go:334] "Generic (PLEG): container finished" podID="de60a77e-3fb7-4777-94de-54b40db66c7a" containerID="0d244f81a8316c096c2a5c65411d0599ec9b3532775fcca8e9fd1d2acb5e787f" exitCode=0 Oct 01 05:33:42 crc kubenswrapper[4661]: I1001 05:33:42.325205 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwgw" event={"ID":"de60a77e-3fb7-4777-94de-54b40db66c7a","Type":"ContainerDied","Data":"0d244f81a8316c096c2a5c65411d0599ec9b3532775fcca8e9fd1d2acb5e787f"} Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.343267 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l56gs" event={"ID":"2dccb83e-7a19-4707-b2cc-8c5f68ebc261","Type":"ContainerStarted","Data":"32d3d2b96193b8ecdedc6da2f5c9647d1c64b9bd8f0a508bf9026c22111f25da"} Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.346416 4661 generic.go:334] "Generic (PLEG): container finished" podID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerID="9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95" exitCode=0 Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.346523 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerDied","Data":"9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95"} Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.348923 4661 generic.go:334] "Generic (PLEG): container finished" podID="de60a77e-3fb7-4777-94de-54b40db66c7a" containerID="90c4694cc0b69178fa5d59a7fc737f871b67701a0f57dc392fd59d7c41948a46" exitCode=0 Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.348970 4661 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwgw" event={"ID":"de60a77e-3fb7-4777-94de-54b40db66c7a","Type":"ContainerDied","Data":"90c4694cc0b69178fa5d59a7fc737f871b67701a0f57dc392fd59d7c41948a46"} Oct 01 05:33:45 crc kubenswrapper[4661]: I1001 05:33:45.365725 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l56gs" podStartSLOduration=4.171890716 podStartE2EDuration="7.365609304s" podCreationTimestamp="2025-10-01 05:33:38 +0000 UTC" firstStartedPulling="2025-10-01 05:33:40.306792849 +0000 UTC m=+269.244771493" lastFinishedPulling="2025-10-01 05:33:43.500511467 +0000 UTC m=+272.438490081" observedRunningTime="2025-10-01 05:33:45.360647153 +0000 UTC m=+274.298625787" watchObservedRunningTime="2025-10-01 05:33:45.365609304 +0000 UTC m=+274.303587948" Oct 01 05:33:46 crc kubenswrapper[4661]: I1001 05:33:46.356732 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerStarted","Data":"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e"} Oct 01 05:33:46 crc kubenswrapper[4661]: I1001 05:33:46.359156 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwgw" event={"ID":"de60a77e-3fb7-4777-94de-54b40db66c7a","Type":"ContainerStarted","Data":"8a7fb4f6127cfbcb56e38e4a6e98033943e96a7081bbf19ac2cb032e4e299f59"} Oct 01 05:33:46 crc kubenswrapper[4661]: I1001 05:33:46.379028 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jkssm" podStartSLOduration=1.958866674 podStartE2EDuration="5.379005151s" podCreationTimestamp="2025-10-01 05:33:41 +0000 UTC" firstStartedPulling="2025-10-01 05:33:42.322688883 +0000 UTC m=+271.260667497" lastFinishedPulling="2025-10-01 05:33:45.74282736 +0000 UTC m=+274.680805974" observedRunningTime="2025-10-01 05:33:46.377142918 +0000 UTC m=+275.315121532" watchObservedRunningTime="2025-10-01 05:33:46.379005151 +0000 UTC m=+275.316983805" Oct 01 05:33:46 crc kubenswrapper[4661]: I1001 05:33:46.401457 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gwwgw" podStartSLOduration=2.922127359 podStartE2EDuration="6.401435498s" podCreationTimestamp="2025-10-01 05:33:40 +0000 UTC" firstStartedPulling="2025-10-01 05:33:42.325588536 +0000 UTC m=+271.263567150" lastFinishedPulling="2025-10-01 05:33:45.804896665 +0000 UTC m=+274.742875289" observedRunningTime="2025-10-01 05:33:46.398724031 +0000 UTC m=+275.336702655" watchObservedRunningTime="2025-10-01 05:33:46.401435498 +0000 UTC m=+275.339414152" Oct 01 05:33:48 crc kubenswrapper[4661]: I1001 05:33:48.388176 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:48 crc kubenswrapper[4661]: I1001 05:33:48.388461 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:48 crc kubenswrapper[4661]: I1001 05:33:48.434618 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:48 crc kubenswrapper[4661]: I1001 05:33:48.956922 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:48 crc kubenswrapper[4661]: 
I1001 05:33:48.957205 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:49 crc kubenswrapper[4661]: I1001 05:33:49.419756 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tc8mt" Oct 01 05:33:49 crc kubenswrapper[4661]: I1001 05:33:49.995091 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l56gs" podUID="2dccb83e-7a19-4707-b2cc-8c5f68ebc261" containerName="registry-server" probeResult="failure" output=< Oct 01 05:33:49 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 05:33:49 crc kubenswrapper[4661]: > Oct 01 05:33:50 crc kubenswrapper[4661]: I1001 05:33:50.756296 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:50 crc kubenswrapper[4661]: I1001 05:33:50.756350 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:50 crc kubenswrapper[4661]: I1001 05:33:50.817301 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:51 crc kubenswrapper[4661]: I1001 05:33:51.365603 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:51 crc kubenswrapper[4661]: I1001 05:33:51.365701 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:51 crc kubenswrapper[4661]: I1001 05:33:51.416226 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:51 crc kubenswrapper[4661]: I1001 05:33:51.434921 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gwwgw" Oct 01 05:33:51 crc kubenswrapper[4661]: I1001 05:33:51.468511 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:33:59 crc kubenswrapper[4661]: I1001 05:33:59.017219 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:33:59 crc kubenswrapper[4661]: I1001 05:33:59.089478 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l56gs" Oct 01 05:35:04 crc kubenswrapper[4661]: I1001 05:35:04.310573 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:35:04 crc kubenswrapper[4661]: I1001 05:35:04.311327 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:35:34 crc kubenswrapper[4661]: I1001 05:35:34.310322 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:35:34 crc kubenswrapper[4661]: I1001 05:35:34.311666 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:36:04 crc kubenswrapper[4661]: I1001 05:36:04.309367 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:36:04 crc kubenswrapper[4661]: I1001 05:36:04.311537 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:36:04 crc kubenswrapper[4661]: I1001 05:36:04.312132 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:36:04 crc kubenswrapper[4661]: I1001 05:36:04.313214 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:36:04 crc kubenswrapper[4661]: I1001 05:36:04.313348 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38" gracePeriod=600 Oct 01 05:36:05 crc kubenswrapper[4661]: I1001 05:36:05.281236 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38" exitCode=0 Oct 01 05:36:05 crc kubenswrapper[4661]: I1001 05:36:05.281307 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38"} Oct 01 05:36:05 crc kubenswrapper[4661]: I1001 05:36:05.281680 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01"} Oct 01 05:36:05 crc kubenswrapper[4661]: I1001 05:36:05.281714 4661 scope.go:117] "RemoveContainer" containerID="c711db09b2a90deb4ab33e7dab5dacc13f0699768d38a2c41d04e3ba044216bb" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 
05:36:46.672130 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mgd4b"] Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.674231 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.687661 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mgd4b"] Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.818088 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.818717 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-certificates\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.818774 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/116a27bf-4f91-4f61-b7b7-d327751ba787-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.819042 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-bound-sa-token\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.819080 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-trusted-ca\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.819106 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-tls\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.819147 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrbhj\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-kube-api-access-qrbhj\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.819195 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/116a27bf-4f91-4f61-b7b7-d327751ba787-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.839007 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920615 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-certificates\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920696 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/116a27bf-4f91-4f61-b7b7-d327751ba787-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920727 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-bound-sa-token\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-trusted-ca\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920783 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-tls\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920824 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrbhj\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-kube-api-access-qrbhj\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.920846 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/116a27bf-4f91-4f61-b7b7-d327751ba787-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.921231 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/116a27bf-4f91-4f61-b7b7-d327751ba787-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.922281 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-certificates\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.922568 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/116a27bf-4f91-4f61-b7b7-d327751ba787-trusted-ca\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.928949 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-registry-tls\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.929055 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/116a27bf-4f91-4f61-b7b7-d327751ba787-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.940899 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrbhj\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-kube-api-access-qrbhj\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:46 crc kubenswrapper[4661]: I1001 05:36:46.941741 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/116a27bf-4f91-4f61-b7b7-d327751ba787-bound-sa-token\") pod \"image-registry-66df7c8f76-mgd4b\" (UID: \"116a27bf-4f91-4f61-b7b7-d327751ba787\") " pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:47 crc kubenswrapper[4661]: I1001 05:36:47.005345 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:36:47 crc kubenswrapper[4661]: I1001 05:36:47.273613 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mgd4b"] Oct 01 05:36:47 crc kubenswrapper[4661]: I1001 05:36:47.588897 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" event={"ID":"116a27bf-4f91-4f61-b7b7-d327751ba787","Type":"ContainerStarted","Data":"721cab6d053c67839a7c75dc308a0d1dc0a26cfcd79de728be35f274a06435fa"} Oct 01 05:36:47 crc kubenswrapper[4661]: I1001 05:36:47.588981 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" event={"ID":"116a27bf-4f91-4f61-b7b7-d327751ba787","Type":"ContainerStarted","Data":"9c96d429bd12b4fdf033d18553fcef054aad30e8f30d180f266caf42be895e2f"} Oct 01 05:36:47 crc kubenswrapper[4661]: I1001 05:36:47.589285 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:37:07 crc kubenswrapper[4661]: I1001 05:37:07.010800 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" Oct 01 05:37:07 crc kubenswrapper[4661]: I1001 05:37:07.042876 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-mgd4b" podStartSLOduration=21.042847643 podStartE2EDuration="21.042847643s" podCreationTimestamp="2025-10-01 05:36:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:36:47.6201571 +0000 UTC m=+456.558135784" watchObservedRunningTime="2025-10-01 05:37:07.042847643 +0000 UTC m=+475.980826297" Oct 01 05:37:07 crc kubenswrapper[4661]: I1001 05:37:07.089010 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"] Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.142048 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" podUID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" containerName="registry" containerID="cri-o://d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4" gracePeriod=30 Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.558907 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.719427 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.719935 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.720237 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.721860 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5rkl\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.721513 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.722078 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.722104 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.722161 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.722195 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token\") pod \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\" (UID: \"f5f5e95a-75e8-4950-b60e-6cbbeed59b67\") " Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.722549 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.723414 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.728879 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl" (OuterVolumeSpecName: "kube-api-access-r5rkl") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "kube-api-access-r5rkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.729417 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.732191 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.733541 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.735964 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.753372 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "f5f5e95a-75e8-4950-b60e-6cbbeed59b67" (UID: "f5f5e95a-75e8-4950-b60e-6cbbeed59b67"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824685 4661 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824753 4661 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824779 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5rkl\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-kube-api-access-r5rkl\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824808 4661 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824833 4661 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.824856 4661 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5f5e95a-75e8-4950-b60e-6cbbeed59b67-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.895620 4661 generic.go:334] "Generic (PLEG): container finished" podID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" containerID="d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4" exitCode=0 Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.895732 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" 
event={"ID":"f5f5e95a-75e8-4950-b60e-6cbbeed59b67","Type":"ContainerDied","Data":"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4"} Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.895782 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" event={"ID":"f5f5e95a-75e8-4950-b60e-6cbbeed59b67","Type":"ContainerDied","Data":"2f1cbcecbce1e52d0cfa523badb0624655de8c8d49bc62493d16819135311761"} Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.895788 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2vp7m" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.895817 4661 scope.go:117] "RemoveContainer" containerID="d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.922839 4661 scope.go:117] "RemoveContainer" containerID="d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4" Oct 01 05:37:32 crc kubenswrapper[4661]: E1001 05:37:32.923394 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4\": container with ID starting with d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4 not found: ID does not exist" containerID="d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.923461 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4"} err="failed to get container status \"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4\": rpc error: code = NotFound desc = could not find container \"d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4\": container with ID starting with d7d553d71abf7e4cf0f75c576968fc100e78a298d5de06458eff8403c49c30d4 not found: ID does not exist" Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.950253 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"] Oct 01 05:37:32 crc kubenswrapper[4661]: I1001 05:37:32.955679 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2vp7m"] Oct 01 05:37:33 crc kubenswrapper[4661]: I1001 05:37:33.774161 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" path="/var/lib/kubelet/pods/f5f5e95a-75e8-4950-b60e-6cbbeed59b67/volumes" Oct 01 05:38:04 crc kubenswrapper[4661]: I1001 05:38:04.310597 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:38:04 crc kubenswrapper[4661]: I1001 05:38:04.311591 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:38:11 crc kubenswrapper[4661]: I1001 05:38:11.924527 4661 scope.go:117] 
"RemoveContainer" containerID="99d542c07a52f7661236a53fbf80e1e7294488d230c5a06788aa0fbb2123fc98" Oct 01 05:38:34 crc kubenswrapper[4661]: I1001 05:38:34.309762 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:38:34 crc kubenswrapper[4661]: I1001 05:38:34.310442 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.992279 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qx25f"] Oct 01 05:38:57 crc kubenswrapper[4661]: E1001 05:38:57.993065 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" containerName="registry" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.993081 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" containerName="registry" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.993219 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f5e95a-75e8-4950-b60e-6cbbeed59b67" containerName="registry" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.993726 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.996312 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.996467 4661 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-w5bdx" Oct 01 05:38:57 crc kubenswrapper[4661]: I1001 05:38:57.999044 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.005547 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-jpkgm"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.006358 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-jpkgm" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.011504 4661 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-qc5vt" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.015750 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qx25f"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.021254 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-jpkgm"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.023404 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4tqmh"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.024059 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.026385 4661 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-gwvmn" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.039500 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4tqmh"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.064300 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvsrd\" (UniqueName: \"kubernetes.io/projected/c4de8059-947f-4d97-ad30-a2a3e1081b19-kube-api-access-fvsrd\") pod \"cert-manager-cainjector-7f985d654d-qx25f\" (UID: \"c4de8059-947f-4d97-ad30-a2a3e1081b19\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.064359 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbqvj\" (UniqueName: \"kubernetes.io/projected/c676e4ec-6d7a-48a8-a54e-cb33046615f3-kube-api-access-tbqvj\") pod \"cert-manager-5b446d88c5-jpkgm\" (UID: \"c676e4ec-6d7a-48a8-a54e-cb33046615f3\") " pod="cert-manager/cert-manager-5b446d88c5-jpkgm" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.064433 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b5bv\" (UniqueName: \"kubernetes.io/projected/8206bae1-8eaa-4f6b-9531-9c200316c97c-kube-api-access-7b5bv\") pod \"cert-manager-webhook-5655c58dd6-4tqmh\" (UID: \"8206bae1-8eaa-4f6b-9531-9c200316c97c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.165826 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b5bv\" (UniqueName: \"kubernetes.io/projected/8206bae1-8eaa-4f6b-9531-9c200316c97c-kube-api-access-7b5bv\") pod \"cert-manager-webhook-5655c58dd6-4tqmh\" (UID: \"8206bae1-8eaa-4f6b-9531-9c200316c97c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.165964 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvsrd\" (UniqueName: \"kubernetes.io/projected/c4de8059-947f-4d97-ad30-a2a3e1081b19-kube-api-access-fvsrd\") pod \"cert-manager-cainjector-7f985d654d-qx25f\" (UID: \"c4de8059-947f-4d97-ad30-a2a3e1081b19\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.166014 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbqvj\" (UniqueName: \"kubernetes.io/projected/c676e4ec-6d7a-48a8-a54e-cb33046615f3-kube-api-access-tbqvj\") pod \"cert-manager-5b446d88c5-jpkgm\" (UID: \"c676e4ec-6d7a-48a8-a54e-cb33046615f3\") " pod="cert-manager/cert-manager-5b446d88c5-jpkgm" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.184044 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvsrd\" (UniqueName: \"kubernetes.io/projected/c4de8059-947f-4d97-ad30-a2a3e1081b19-kube-api-access-fvsrd\") pod \"cert-manager-cainjector-7f985d654d-qx25f\" (UID: \"c4de8059-947f-4d97-ad30-a2a3e1081b19\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.188464 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tbqvj\" (UniqueName: \"kubernetes.io/projected/c676e4ec-6d7a-48a8-a54e-cb33046615f3-kube-api-access-tbqvj\") pod \"cert-manager-5b446d88c5-jpkgm\" (UID: \"c676e4ec-6d7a-48a8-a54e-cb33046615f3\") " pod="cert-manager/cert-manager-5b446d88c5-jpkgm" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.193827 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b5bv\" (UniqueName: \"kubernetes.io/projected/8206bae1-8eaa-4f6b-9531-9c200316c97c-kube-api-access-7b5bv\") pod \"cert-manager-webhook-5655c58dd6-4tqmh\" (UID: \"8206bae1-8eaa-4f6b-9531-9c200316c97c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.310464 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.322296 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-jpkgm" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.338056 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.583547 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-jpkgm"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.592406 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 05:38:58 crc kubenswrapper[4661]: W1001 05:38:58.619185 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8206bae1_8eaa_4f6b_9531_9c200316c97c.slice/crio-8605dbc50ee4bba4a36c956b264eb516c76eaa03f069cb161363ab37e5283dfd WatchSource:0}: Error finding container 8605dbc50ee4bba4a36c956b264eb516c76eaa03f069cb161363ab37e5283dfd: Status 404 returned error can't find the container with id 8605dbc50ee4bba4a36c956b264eb516c76eaa03f069cb161363ab37e5283dfd Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.622683 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4tqmh"] Oct 01 05:38:58 crc kubenswrapper[4661]: I1001 05:38:58.764377 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qx25f"] Oct 01 05:38:58 crc kubenswrapper[4661]: W1001 05:38:58.775075 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4de8059_947f_4d97_ad30_a2a3e1081b19.slice/crio-de6aa05a40582d6320bb9a33dacb3cca4bdd63b257c415232aeec80a43e3df77 WatchSource:0}: Error finding container de6aa05a40582d6320bb9a33dacb3cca4bdd63b257c415232aeec80a43e3df77: Status 404 returned error can't find the container with id de6aa05a40582d6320bb9a33dacb3cca4bdd63b257c415232aeec80a43e3df77 Oct 01 05:38:59 crc kubenswrapper[4661]: I1001 05:38:59.480667 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-jpkgm" event={"ID":"c676e4ec-6d7a-48a8-a54e-cb33046615f3","Type":"ContainerStarted","Data":"8d010f836c9c4e2a1231a369e18d4bfdd8a58f7b4bc7f0822099194325d8f967"} Oct 01 05:38:59 crc kubenswrapper[4661]: I1001 05:38:59.483747 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" event={"ID":"c4de8059-947f-4d97-ad30-a2a3e1081b19","Type":"ContainerStarted","Data":"de6aa05a40582d6320bb9a33dacb3cca4bdd63b257c415232aeec80a43e3df77"} Oct 01 05:38:59 crc kubenswrapper[4661]: I1001 05:38:59.486753 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" event={"ID":"8206bae1-8eaa-4f6b-9531-9c200316c97c","Type":"ContainerStarted","Data":"8605dbc50ee4bba4a36c956b264eb516c76eaa03f069cb161363ab37e5283dfd"} Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.526838 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" event={"ID":"c4de8059-947f-4d97-ad30-a2a3e1081b19","Type":"ContainerStarted","Data":"2d3afc079834c645b75b2ee34bb2b9a6740365864fec4de3929d64f9b71b2982"} Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.528997 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" event={"ID":"8206bae1-8eaa-4f6b-9531-9c200316c97c","Type":"ContainerStarted","Data":"e36de33b6a4962be01125e1ac74789477048bb0b937fafcdfd6e552cf642e4ec"} Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.529115 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.531109 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-jpkgm" event={"ID":"c676e4ec-6d7a-48a8-a54e-cb33046615f3","Type":"ContainerStarted","Data":"9ec6ca07349611ce0672b945869857ae81e091be049dba5557765c96f653f835"} Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.546821 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-qx25f" podStartSLOduration=2.24738244 podStartE2EDuration="5.546800758s" podCreationTimestamp="2025-10-01 05:38:57 +0000 UTC" firstStartedPulling="2025-10-01 05:38:58.778109235 +0000 UTC m=+587.716087849" lastFinishedPulling="2025-10-01 05:39:02.077527553 +0000 UTC m=+591.015506167" observedRunningTime="2025-10-01 05:39:02.541288472 +0000 UTC m=+591.479267096" watchObservedRunningTime="2025-10-01 05:39:02.546800758 +0000 UTC m=+591.484779402" Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.557553 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-jpkgm" podStartSLOduration=2.063783755 podStartE2EDuration="5.557501362s" podCreationTimestamp="2025-10-01 05:38:57 +0000 UTC" firstStartedPulling="2025-10-01 05:38:58.592172133 +0000 UTC m=+587.530150747" lastFinishedPulling="2025-10-01 05:39:02.08588974 +0000 UTC m=+591.023868354" observedRunningTime="2025-10-01 05:39:02.555526615 +0000 UTC m=+591.493505259" watchObservedRunningTime="2025-10-01 05:39:02.557501362 +0000 UTC m=+591.495479996" Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.596425 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" podStartSLOduration=1.069433825 podStartE2EDuration="4.596399535s" podCreationTimestamp="2025-10-01 05:38:58 +0000 UTC" firstStartedPulling="2025-10-01 05:38:58.620930488 +0000 UTC m=+587.558909102" lastFinishedPulling="2025-10-01 05:39:02.147896198 +0000 UTC m=+591.085874812" observedRunningTime="2025-10-01 05:39:02.593991316 +0000 UTC m=+591.531969940" watchObservedRunningTime="2025-10-01 
Oct 01 05:39:02 crc kubenswrapper[4661]: I1001 05:39:02.596425 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh" podStartSLOduration=1.069433825 podStartE2EDuration="4.596399535s" podCreationTimestamp="2025-10-01 05:38:58 +0000 UTC" firstStartedPulling="2025-10-01 05:38:58.620930488 +0000 UTC m=+587.558909102" lastFinishedPulling="2025-10-01 05:39:02.147896198 +0000 UTC m=+591.085874812" observedRunningTime="2025-10-01 05:39:02.593991316 +0000 UTC m=+591.531969940" watchObservedRunningTime="2025-10-01 05:39:02.596399535 +0000 UTC m=+591.534378169"
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.309201 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.309678 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.309736 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.310489 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.310664 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01" gracePeriod=600
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.546105 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01" exitCode=0
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.546195 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01"}
Oct 01 05:39:04 crc kubenswrapper[4661]: I1001 05:39:04.546250 4661 scope.go:117] "RemoveContainer" containerID="4d486eb4269c2cf09c13820c127de1ef573cf9774ba7665e8bb89052abf03b38"
Oct 01 05:39:05 crc kubenswrapper[4661]: I1001 05:39:05.556993 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7"}
Oct 01 05:39:08 crc kubenswrapper[4661]: I1001 05:39:08.342068 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-4tqmh"
Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.002562 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fj7kz"]
containerName="ovn-controller" containerID="cri-o://a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004180 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="northd" containerID="cri-o://a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004363 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004229 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="sbdb" containerID="cri-o://4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004322 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-node" containerID="cri-o://71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004276 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-acl-logging" containerID="cri-o://6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.004417 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="nbdb" containerID="cri-o://a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.059732 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" containerID="cri-o://d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" gracePeriod=30 Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.349732 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6834e918_6be2_4c19_ac03_80fa36a2659c.slice/crio-conmon-a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6834e918_6be2_4c19_ac03_80fa36a2659c.slice/crio-conmon-a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536.scope\": RecentStats: unable to find data in memory cache]" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.356690 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/3.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.359342 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovn-acl-logging/0.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.360033 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovn-controller/0.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.360766 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423173 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pl295"] Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423446 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-node" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423461 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-node" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423471 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423477 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423507 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kubecfg-setup" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423514 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kubecfg-setup" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423521 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423527 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423534 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423540 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423546 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423552 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423562 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" 
containerName="kube-rbac-proxy-ovn-metrics" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423585 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423594 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423599 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423609 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="sbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423615 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="sbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423698 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="nbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423704 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="nbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423712 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="northd" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423717 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="northd" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.423745 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-acl-logging" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423752 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-acl-logging" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423874 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="northd" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423907 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="sbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423918 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423925 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-node" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423932 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423941 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423947 4661 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423954 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423980 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovn-acl-logging" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423987 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.423995 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.424001 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="nbdb" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.424111 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.424138 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerName="ovnkube-controller" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.425965 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426109 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426173 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426224 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqhtk\" (UniqueName: \"kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426257 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426300 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426330 4661 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426373 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426255 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426363 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426372 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426377 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426470 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426403 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426531 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426560 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426594 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426663 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426692 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426723 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426754 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426774 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426780 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426809 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash" (OuterVolumeSpecName: "host-slash") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426843 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426895 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426926 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426941 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.426949 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert\") pod \"6834e918-6be2-4c19-ac03-80fa36a2659c\" (UID: \"6834e918-6be2-4c19-ac03-80fa36a2659c\") " Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427270 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427320 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427355 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log" (OuterVolumeSpecName: "node-log") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427390 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427400 4661 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427419 4661 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427431 4661 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427435 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427445 4661 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427480 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427485 4661 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427512 4661 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427525 4661 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427537 4661 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427551 4661 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427562 4661 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-slash\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427589 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket" (OuterVolumeSpecName: "log-socket") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.427935 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.428103 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.445908 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.446057 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk" (OuterVolumeSpecName: "kube-api-access-kqhtk") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "kube-api-access-kqhtk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.456531 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "6834e918-6be2-4c19-ac03-80fa36a2659c" (UID: "6834e918-6be2-4c19-ac03-80fa36a2659c"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528547 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-netns\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528597 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-slash\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528621 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-env-overrides\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528676 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzfx9\" (UniqueName: \"kubernetes.io/projected/54eaf765-501e-41a6-b568-8fa2fb344af6-kube-api-access-nzfx9\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528702 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-systemd\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528729 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/54eaf765-501e-41a6-b568-8fa2fb344af6-ovn-node-metrics-cert\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528752 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-kubelet\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528788 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528818 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528837 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-etc-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528876 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-systemd-units\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528909 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-var-lib-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528927 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-node-log\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528942 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-netd\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.528958 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc 
kubenswrapper[4661]: I1001 05:39:09.529053 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-config\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529137 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-log-socket\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529200 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-script-lib\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529248 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-ovn\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529281 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-bin\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529377 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqhtk\" (UniqueName: \"kubernetes.io/projected/6834e918-6be2-4c19-ac03-80fa36a2659c-kube-api-access-kqhtk\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529395 4661 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529409 4661 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-node-log\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529422 4661 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529434 4661 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529449 4661 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529462 4661 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-log-socket\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529476 4661 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6834e918-6be2-4c19-ac03-80fa36a2659c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529488 4661 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6834e918-6be2-4c19-ac03-80fa36a2659c-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.529500 4661 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6834e918-6be2-4c19-ac03-80fa36a2659c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.580453 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/2.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.581006 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/1.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.581044 4661 generic.go:334] "Generic (PLEG): container finished" podID="dc3b0e2f-f27e-4420-9323-ec45878c11a6" containerID="c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e" exitCode=2 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.581096 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerDied","Data":"c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.581127 4661 scope.go:117] "RemoveContainer" containerID="9a1102b0dd486d08f91d8b9557d01459f38ec90304d866afb5fc273c51680b51" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.581857 4661 scope.go:117] "RemoveContainer" containerID="c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.582301 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-l96mp_openshift-multus(dc3b0e2f-f27e-4420-9323-ec45878c11a6)\"" pod="openshift-multus/multus-l96mp" podUID="dc3b0e2f-f27e-4420-9323-ec45878c11a6" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.582988 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovnkube-controller/3.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.585142 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovn-acl-logging/0.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.585708 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fj7kz_6834e918-6be2-4c19-ac03-80fa36a2659c/ovn-controller/0.log" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586121 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586140 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586149 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586155 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586162 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586168 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" exitCode=0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586175 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" exitCode=143 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586183 4661 generic.go:334] "Generic (PLEG): container finished" podID="6834e918-6be2-4c19-ac03-80fa36a2659c" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" exitCode=143 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586223 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586266 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586268 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586404 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586429 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586449 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586470 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586490 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586506 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586515 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586522 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586530 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586537 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586545 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586553 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586561 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586567 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586578 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586590 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586598 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586605 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586615 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586625 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586660 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586670 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586679 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586689 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586698 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586709 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586721 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586732 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586740 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586750 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586760 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586769 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586780 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586790 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586801 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586811 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586825 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fj7kz" event={"ID":"6834e918-6be2-4c19-ac03-80fa36a2659c","Type":"ContainerDied","Data":"2b30a8b4664ce0f91994e2edb6cf7edd82ace404222ec65878b7b332991044bb"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586841 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586850 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 
05:39:09.586858 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586865 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586873 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586881 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586888 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586896 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586903 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.586910 4661 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.611672 4661 scope.go:117] "RemoveContainer" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.624959 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fj7kz"] Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.629929 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630520 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-var-lib-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630576 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-node-log\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630621 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-netd\") pod \"ovnkube-node-pl295\" (UID: 
\"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630692 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-var-lib-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630711 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630753 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-config\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630795 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-log-socket\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630831 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-script-lib\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630869 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-ovn\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630918 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-bin\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630951 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-netns\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.630982 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-slash\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc 
kubenswrapper[4661]: I1001 05:39:09.631012 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-env-overrides\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631043 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzfx9\" (UniqueName: \"kubernetes.io/projected/54eaf765-501e-41a6-b568-8fa2fb344af6-kube-api-access-nzfx9\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631072 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-systemd\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631108 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/54eaf765-501e-41a6-b568-8fa2fb344af6-ovn-node-metrics-cert\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631141 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-kubelet\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631172 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631210 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631252 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-etc-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631288 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-systemd-units\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc 
kubenswrapper[4661]: I1001 05:39:09.631342 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-netd\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631356 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-cni-bin\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631382 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-systemd\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631374 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-node-log\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631429 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-kubelet\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631457 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-log-socket\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631457 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-slash\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631478 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631532 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-netns\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631586 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-ovn\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631396 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-host-run-ovn-kubernetes\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631938 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-run-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.631968 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-etc-openvswitch\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.632170 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-config\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.632224 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-ovnkube-script-lib\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.632422 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/54eaf765-501e-41a6-b568-8fa2fb344af6-env-overrides\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.632505 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fj7kz"] Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.632520 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/54eaf765-501e-41a6-b568-8fa2fb344af6-systemd-units\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.635174 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/54eaf765-501e-41a6-b568-8fa2fb344af6-ovn-node-metrics-cert\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.650357 4661 scope.go:117] "RemoveContainer" 
containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.652109 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzfx9\" (UniqueName: \"kubernetes.io/projected/54eaf765-501e-41a6-b568-8fa2fb344af6-kube-api-access-nzfx9\") pod \"ovnkube-node-pl295\" (UID: \"54eaf765-501e-41a6-b568-8fa2fb344af6\") " pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.669562 4661 scope.go:117] "RemoveContainer" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.688476 4661 scope.go:117] "RemoveContainer" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.708220 4661 scope.go:117] "RemoveContainer" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.727457 4661 scope.go:117] "RemoveContainer" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.745429 4661 scope.go:117] "RemoveContainer" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.760164 4661 scope.go:117] "RemoveContainer" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.767502 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6834e918-6be2-4c19-ac03-80fa36a2659c" path="/var/lib/kubelet/pods/6834e918-6be2-4c19-ac03-80fa36a2659c/volumes" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.769481 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.779535 4661 scope.go:117] "RemoveContainer" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.805908 4661 scope.go:117] "RemoveContainer" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.806420 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": container with ID starting with d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e not found: ID does not exist" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.806457 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} err="failed to get container status \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": rpc error: code = NotFound desc = could not find container \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": container with ID starting with d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.806481 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.806975 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": container with ID starting with faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8 not found: ID does not exist" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.807070 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} err="failed to get container status \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": rpc error: code = NotFound desc = could not find container \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": container with ID starting with faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.807156 4661 scope.go:117] "RemoveContainer" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.807715 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": container with ID starting with 4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670 not found: ID does not exist" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.807823 4661 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} err="failed to get container status \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": rpc error: code = NotFound desc = could not find container \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": container with ID starting with 4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.807880 4661 scope.go:117] "RemoveContainer" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.808337 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": container with ID starting with a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e not found: ID does not exist" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.808383 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} err="failed to get container status \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": rpc error: code = NotFound desc = could not find container \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": container with ID starting with a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.808417 4661 scope.go:117] "RemoveContainer" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.808969 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": container with ID starting with a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536 not found: ID does not exist" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809001 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} err="failed to get container status \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": rpc error: code = NotFound desc = could not find container \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": container with ID starting with a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809020 4661 scope.go:117] "RemoveContainer" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.809414 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": container with ID starting with a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea not found: ID does not exist" 
containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809457 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} err="failed to get container status \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": rpc error: code = NotFound desc = could not find container \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": container with ID starting with a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809487 4661 scope.go:117] "RemoveContainer" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.809891 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": container with ID starting with 71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca not found: ID does not exist" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809921 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} err="failed to get container status \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": rpc error: code = NotFound desc = could not find container \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": container with ID starting with 71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.809942 4661 scope.go:117] "RemoveContainer" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.810358 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": container with ID starting with 6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590 not found: ID does not exist" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.810385 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} err="failed to get container status \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": rpc error: code = NotFound desc = could not find container \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": container with ID starting with 6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.810405 4661 scope.go:117] "RemoveContainer" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.810870 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": container with ID starting with a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8 not found: ID does not exist" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.810912 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} err="failed to get container status \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": rpc error: code = NotFound desc = could not find container \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": container with ID starting with a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.810938 4661 scope.go:117] "RemoveContainer" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: E1001 05:39:09.811433 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": container with ID starting with 01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006 not found: ID does not exist" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.811529 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} err="failed to get container status \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": rpc error: code = NotFound desc = could not find container \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": container with ID starting with 01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.811561 4661 scope.go:117] "RemoveContainer" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.812644 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} err="failed to get container status \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": rpc error: code = NotFound desc = could not find container \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": container with ID starting with d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.812676 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813053 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} err="failed to get container status \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": rpc error: code = NotFound desc = could not find container \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": container with ID starting with 
faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813092 4661 scope.go:117] "RemoveContainer" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813434 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} err="failed to get container status \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": rpc error: code = NotFound desc = could not find container \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": container with ID starting with 4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813458 4661 scope.go:117] "RemoveContainer" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813861 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} err="failed to get container status \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": rpc error: code = NotFound desc = could not find container \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": container with ID starting with a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.813891 4661 scope.go:117] "RemoveContainer" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814175 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} err="failed to get container status \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": rpc error: code = NotFound desc = could not find container \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": container with ID starting with a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814195 4661 scope.go:117] "RemoveContainer" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814556 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} err="failed to get container status \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": rpc error: code = NotFound desc = could not find container \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": container with ID starting with a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814577 4661 scope.go:117] "RemoveContainer" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814898 4661 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} err="failed to get container status \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": rpc error: code = NotFound desc = could not find container \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": container with ID starting with 71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.814925 4661 scope.go:117] "RemoveContainer" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.815235 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} err="failed to get container status \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": rpc error: code = NotFound desc = could not find container \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": container with ID starting with 6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.815257 4661 scope.go:117] "RemoveContainer" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.815572 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} err="failed to get container status \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": rpc error: code = NotFound desc = could not find container \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": container with ID starting with a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.815598 4661 scope.go:117] "RemoveContainer" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.815984 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} err="failed to get container status \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": rpc error: code = NotFound desc = could not find container \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": container with ID starting with 01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.816009 4661 scope.go:117] "RemoveContainer" containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.816313 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} err="failed to get container status \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": rpc error: code = NotFound desc = could not find container \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": container with ID starting with d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e not found: ID does not exist" Oct 
01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.816365 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.816780 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} err="failed to get container status \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": rpc error: code = NotFound desc = could not find container \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": container with ID starting with faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.816810 4661 scope.go:117] "RemoveContainer" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.817328 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} err="failed to get container status \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": rpc error: code = NotFound desc = could not find container \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": container with ID starting with 4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.817389 4661 scope.go:117] "RemoveContainer" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.817668 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} err="failed to get container status \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": rpc error: code = NotFound desc = could not find container \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": container with ID starting with a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.817697 4661 scope.go:117] "RemoveContainer" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818001 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} err="failed to get container status \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": rpc error: code = NotFound desc = could not find container \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": container with ID starting with a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818049 4661 scope.go:117] "RemoveContainer" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818308 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} err="failed to get container status 
\"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": rpc error: code = NotFound desc = could not find container \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": container with ID starting with a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818331 4661 scope.go:117] "RemoveContainer" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818686 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} err="failed to get container status \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": rpc error: code = NotFound desc = could not find container \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": container with ID starting with 71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.818711 4661 scope.go:117] "RemoveContainer" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819033 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} err="failed to get container status \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": rpc error: code = NotFound desc = could not find container \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": container with ID starting with 6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819055 4661 scope.go:117] "RemoveContainer" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819344 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} err="failed to get container status \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": rpc error: code = NotFound desc = could not find container \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": container with ID starting with a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819368 4661 scope.go:117] "RemoveContainer" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819762 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} err="failed to get container status \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": rpc error: code = NotFound desc = could not find container \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": container with ID starting with 01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.819782 4661 scope.go:117] "RemoveContainer" 
containerID="d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820118 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e"} err="failed to get container status \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": rpc error: code = NotFound desc = could not find container \"d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e\": container with ID starting with d7a7ce4c35ede099e29155e9d80ea2260189227c3185418e0dd73923a31f710e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820144 4661 scope.go:117] "RemoveContainer" containerID="faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820520 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8"} err="failed to get container status \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": rpc error: code = NotFound desc = could not find container \"faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8\": container with ID starting with faf1980c03ce3bb80f8a22844c27812246d0700100896ebebc217c36257198e8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820560 4661 scope.go:117] "RemoveContainer" containerID="4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820950 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670"} err="failed to get container status \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": rpc error: code = NotFound desc = could not find container \"4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670\": container with ID starting with 4b83f5e2284ba6d4bba8d60e46298f317cd2dc40b51062ec146a09504cdc6670 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.820974 4661 scope.go:117] "RemoveContainer" containerID="a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.821329 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e"} err="failed to get container status \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": rpc error: code = NotFound desc = could not find container \"a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e\": container with ID starting with a7d0a7ecfc6b309d675d1c1f16efc8d7cd70fbc79d29041454bfc962ad9d083e not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.821349 4661 scope.go:117] "RemoveContainer" containerID="a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.821692 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536"} err="failed to get container status \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": rpc error: code = NotFound desc = could not find 
container \"a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536\": container with ID starting with a80becb877f5aba0f3dd228ac118f14e76973392c8681e7fbcc7a4585d3fa536 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.821712 4661 scope.go:117] "RemoveContainer" containerID="a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822080 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea"} err="failed to get container status \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": rpc error: code = NotFound desc = could not find container \"a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea\": container with ID starting with a15ae9fd746a3e4ceb493da69b9ded35b80a7089a15ba5b0e3c7e5f7a0dd24ea not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822111 4661 scope.go:117] "RemoveContainer" containerID="71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822441 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca"} err="failed to get container status \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": rpc error: code = NotFound desc = could not find container \"71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca\": container with ID starting with 71f71daa934a98f4fe8514d941c7f3b5f501d33ed703f4d534fcfdea3da13dca not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822462 4661 scope.go:117] "RemoveContainer" containerID="6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590" Oct 01 05:39:09 crc kubenswrapper[4661]: W1001 05:39:09.822757 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54eaf765_501e_41a6_b568_8fa2fb344af6.slice/crio-7b4025a8b6212bfc903bc6d14fe07d852b723f6718850b94a01cf3847c1249b0 WatchSource:0}: Error finding container 7b4025a8b6212bfc903bc6d14fe07d852b723f6718850b94a01cf3847c1249b0: Status 404 returned error can't find the container with id 7b4025a8b6212bfc903bc6d14fe07d852b723f6718850b94a01cf3847c1249b0 Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822898 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590"} err="failed to get container status \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": rpc error: code = NotFound desc = could not find container \"6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590\": container with ID starting with 6f66c382e015246f895f76642e07d39fa306aa817727c9dc8230c581bfb98590 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.822929 4661 scope.go:117] "RemoveContainer" containerID="a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.823413 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8"} err="failed to get container status 
\"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": rpc error: code = NotFound desc = could not find container \"a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8\": container with ID starting with a398e5d603cf10be59cc45785d7207f48a9f12d8042e74ab9b315b45a3bd6bf8 not found: ID does not exist" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.823448 4661 scope.go:117] "RemoveContainer" containerID="01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006" Oct 01 05:39:09 crc kubenswrapper[4661]: I1001 05:39:09.823843 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006"} err="failed to get container status \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": rpc error: code = NotFound desc = could not find container \"01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006\": container with ID starting with 01f26c24704e7c02287f65837abd59ad6ffe77327bb4e5a314f3d7578ee33006 not found: ID does not exist" Oct 01 05:39:10 crc kubenswrapper[4661]: I1001 05:39:10.596437 4661 generic.go:334] "Generic (PLEG): container finished" podID="54eaf765-501e-41a6-b568-8fa2fb344af6" containerID="d4e8cfe5d558013790d8fdf85ac1b561a84950600ddec675071a2c642c2eda99" exitCode=0 Oct 01 05:39:10 crc kubenswrapper[4661]: I1001 05:39:10.596498 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerDied","Data":"d4e8cfe5d558013790d8fdf85ac1b561a84950600ddec675071a2c642c2eda99"} Oct 01 05:39:10 crc kubenswrapper[4661]: I1001 05:39:10.596961 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"7b4025a8b6212bfc903bc6d14fe07d852b723f6718850b94a01cf3847c1249b0"} Oct 01 05:39:10 crc kubenswrapper[4661]: I1001 05:39:10.600071 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/2.log" Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610172 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"c716ca1221ef5d6c908fe435c5c20d9188c08f9c0e74e534dd335130f477590e"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610573 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"6f3dd74f14b20b6c9d8a2b19a2e5ebd973a391863de8c662d2e8f06755ed9b5b"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610594 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"1c952b2789c3e05a3d292936424f4fbe91eb846bf5631cca4865e822380abe1b"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610614 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"3454cec8b9ec7019e16dcced9b4e89412678b5131fe339ffac51baa0643a7f21"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610657 4661 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"b3da68735d87ba720a881ec72a491f87baca638f529db549a8d929bf2be319ba"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.610677 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"23f2a3c97efdbb00fa951b1ee6c01129a22806d97cb37f209038e129d0b7ff19"} Oct 01 05:39:11 crc kubenswrapper[4661]: I1001 05:39:11.980366 4661 scope.go:117] "RemoveContainer" containerID="a5e836c818ddf9291c9a2972738fd569913752dfb6349b32fdfc3aaafa6ac9c9" Oct 01 05:39:12 crc kubenswrapper[4661]: I1001 05:39:12.007064 4661 scope.go:117] "RemoveContainer" containerID="2e725190dddcf8ec5d2fd0f021fc3ed49797c1c6de11829a3c5ef5d01325b20d" Oct 01 05:39:14 crc kubenswrapper[4661]: I1001 05:39:14.640325 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"3dc12c71b378ab9d586432379605dc72a13e1a922848059d4dab0be797a4bc5b"} Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.656034 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" event={"ID":"54eaf765-501e-41a6-b568-8fa2fb344af6","Type":"ContainerStarted","Data":"2b21698b63bc422e675980d4424033b9042554d31f7c241ad56aa36aaa7d9c44"} Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.656442 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.656467 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.656484 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.683273 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.685265 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:16 crc kubenswrapper[4661]: I1001 05:39:16.688486 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" podStartSLOduration=7.688475135 podStartE2EDuration="7.688475135s" podCreationTimestamp="2025-10-01 05:39:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:39:16.68722181 +0000 UTC m=+605.625200434" watchObservedRunningTime="2025-10-01 05:39:16.688475135 +0000 UTC m=+605.626453759" Oct 01 05:39:20 crc kubenswrapper[4661]: I1001 05:39:20.757070 4661 scope.go:117] "RemoveContainer" containerID="c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e" Oct 01 05:39:20 crc kubenswrapper[4661]: E1001 05:39:20.757913 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus 
Oct 01 05:39:33 crc kubenswrapper[4661]: I1001 05:39:33.757426 4661 scope.go:117] "RemoveContainer" containerID="c4f57a5e14aa5a8c99b44e2c1f2de034c084a8ede520375e6f4bc54461906a3e"
Oct 01 05:39:34 crc kubenswrapper[4661]: I1001 05:39:34.778986 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-l96mp_dc3b0e2f-f27e-4420-9323-ec45878c11a6/kube-multus/2.log"
Oct 01 05:39:34 crc kubenswrapper[4661]: I1001 05:39:34.779437 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-l96mp" event={"ID":"dc3b0e2f-f27e-4420-9323-ec45878c11a6","Type":"ContainerStarted","Data":"a08dc8390b44273b773a605c53c07974abfd549f03e121be2f3d0a08decaa9dc"}
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.297344 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"]
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.299311 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.302880 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.314513 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"]
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.424056 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq4wd\" (UniqueName: \"kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.424094 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.424138 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"
Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.525065 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"
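Each volume above passes through two reconciler phases: VerifyControllerAttachedVolume records the volume in the kubelet's actual state of world, and MountVolume then runs the plugin's SetUp. A compressed sketch of that desired-state versus actual-state loop (all types and names here are invented for illustration, not the kubelet's own volumemanager API):

package main

import "fmt"

type volState int

const (
	attached volState = iota // VerifyControllerAttachedVolume succeeded
	mounted                  // MountVolume.SetUp succeeded
)

type reconciler struct {
	desired map[string]bool     // volumes the pod spec asks for
	actual  map[string]volState // what has actually been done so far
}

// sync advances each desired volume one step per pass, mirroring the
// reconciler_common.go progression visible in the log above.
func (r *reconciler) sync() {
	for vol := range r.desired {
		st, seen := r.actual[vol]
		switch {
		case !seen:
			fmt.Printf("VerifyControllerAttachedVolume started for %q\n", vol)
			r.actual[vol] = attached
		case st == attached:
			fmt.Printf("MountVolume.SetUp succeeded for %q\n", vol)
			r.actual[vol] = mounted
		}
	}
}

func main() {
	r := &reconciler{
		desired: map[string]bool{"bundle": true, "util": true, "kube-api-access-mq4wd": true},
		actual:  map[string]volState{},
	}
	r.sync() // attach pass
	r.sync() // mount pass
}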
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.525187 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.525297 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq4wd\" (UniqueName: \"kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.525946 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.525946 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.557831 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq4wd\" (UniqueName: \"kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.630503 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:36 crc kubenswrapper[4661]: I1001 05:39:36.913043 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk"] Oct 01 05:39:36 crc kubenswrapper[4661]: W1001 05:39:36.922213 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5baaec9_0520_4cc9_9056_f4729497a734.slice/crio-357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627 WatchSource:0}: Error finding container 357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627: Status 404 returned error can't find the container with id 357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627 Oct 01 05:39:37 crc kubenswrapper[4661]: I1001 05:39:37.803747 4661 generic.go:334] "Generic (PLEG): container finished" podID="a5baaec9-0520-4cc9-9056-f4729497a734" containerID="05f48da25a332efcfffa0e59da60b5d966cc1946569a8a2eff2bc9db3257216f" exitCode=0 Oct 01 05:39:37 crc kubenswrapper[4661]: I1001 05:39:37.803993 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" event={"ID":"a5baaec9-0520-4cc9-9056-f4729497a734","Type":"ContainerDied","Data":"05f48da25a332efcfffa0e59da60b5d966cc1946569a8a2eff2bc9db3257216f"} Oct 01 05:39:37 crc kubenswrapper[4661]: I1001 05:39:37.804175 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" event={"ID":"a5baaec9-0520-4cc9-9056-f4729497a734","Type":"ContainerStarted","Data":"357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627"} Oct 01 05:39:39 crc kubenswrapper[4661]: I1001 05:39:39.797661 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pl295" Oct 01 05:39:39 crc kubenswrapper[4661]: I1001 05:39:39.845095 4661 generic.go:334] "Generic (PLEG): container finished" podID="a5baaec9-0520-4cc9-9056-f4729497a734" containerID="adf332ae109147120b453ad471edc3a50aa8307cddaf661f835da48c4ba3d0bd" exitCode=0 Oct 01 05:39:39 crc kubenswrapper[4661]: I1001 05:39:39.845157 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" event={"ID":"a5baaec9-0520-4cc9-9056-f4729497a734","Type":"ContainerDied","Data":"adf332ae109147120b453ad471edc3a50aa8307cddaf661f835da48c4ba3d0bd"} Oct 01 05:39:40 crc kubenswrapper[4661]: I1001 05:39:40.855018 4661 generic.go:334] "Generic (PLEG): container finished" podID="a5baaec9-0520-4cc9-9056-f4729497a734" containerID="f8c5207f447e08deacc20e149f45514dad46e8fd10dfab4a06f6e76c9c05f2fb" exitCode=0 Oct 01 05:39:40 crc kubenswrapper[4661]: I1001 05:39:40.855166 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" event={"ID":"a5baaec9-0520-4cc9-9056-f4729497a734","Type":"ContainerDied","Data":"f8c5207f447e08deacc20e149f45514dad46e8fd10dfab4a06f6e76c9c05f2fb"} Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.155565 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.310689 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util\") pod \"a5baaec9-0520-4cc9-9056-f4729497a734\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.310911 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle\") pod \"a5baaec9-0520-4cc9-9056-f4729497a734\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.311163 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq4wd\" (UniqueName: \"kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd\") pod \"a5baaec9-0520-4cc9-9056-f4729497a734\" (UID: \"a5baaec9-0520-4cc9-9056-f4729497a734\") " Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.314730 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle" (OuterVolumeSpecName: "bundle") pod "a5baaec9-0520-4cc9-9056-f4729497a734" (UID: "a5baaec9-0520-4cc9-9056-f4729497a734"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.319888 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd" (OuterVolumeSpecName: "kube-api-access-mq4wd") pod "a5baaec9-0520-4cc9-9056-f4729497a734" (UID: "a5baaec9-0520-4cc9-9056-f4729497a734"). InnerVolumeSpecName "kube-api-access-mq4wd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.413019 4661 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.413068 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq4wd\" (UniqueName: \"kubernetes.io/projected/a5baaec9-0520-4cc9-9056-f4729497a734-kube-api-access-mq4wd\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.667237 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util" (OuterVolumeSpecName: "util") pod "a5baaec9-0520-4cc9-9056-f4729497a734" (UID: "a5baaec9-0520-4cc9-9056-f4729497a734"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.718830 4661 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a5baaec9-0520-4cc9-9056-f4729497a734-util\") on node \"crc\" DevicePath \"\"" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.871701 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" event={"ID":"a5baaec9-0520-4cc9-9056-f4729497a734","Type":"ContainerDied","Data":"357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627"} Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.871774 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk" Oct 01 05:39:42 crc kubenswrapper[4661]: I1001 05:39:42.871791 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="357a7df177c7f9205a9330994c1a52a47e6afbf212d20e05ec35cb852c438627" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.639586 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2"] Oct 01 05:39:53 crc kubenswrapper[4661]: E1001 05:39:53.640048 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="extract" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.640059 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="extract" Oct 01 05:39:53 crc kubenswrapper[4661]: E1001 05:39:53.640067 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="pull" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.640072 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="pull" Oct 01 05:39:53 crc kubenswrapper[4661]: E1001 05:39:53.640083 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="util" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.640089 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="util" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.640175 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5baaec9-0520-4cc9-9056-f4729497a734" containerName="extract" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.640487 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.643143 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-t9zjv" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.643422 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.644466 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.667393 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.755684 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.756372 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.758226 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.766371 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.767039 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.782954 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-2fdfx" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.787967 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.788449 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hng2\" (UniqueName: \"kubernetes.io/projected/e4e8d27f-fbab-4c45-b182-73df1cef9061-kube-api-access-2hng2\") pod \"obo-prometheus-operator-7c8cf85677-8qwx2\" (UID: \"e4e8d27f-fbab-4c45-b182-73df1cef9061\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.808876 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.881512 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-scssh"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.882120 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.885030 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.890877 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.891154 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hng2\" (UniqueName: \"kubernetes.io/projected/e4e8d27f-fbab-4c45-b182-73df1cef9061-kube-api-access-2hng2\") pod \"obo-prometheus-operator-7c8cf85677-8qwx2\" (UID: \"e4e8d27f-fbab-4c45-b182-73df1cef9061\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.893010 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.893034 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.893061 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.892943 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-vz495" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.929405 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hng2\" (UniqueName: \"kubernetes.io/projected/e4e8d27f-fbab-4c45-b182-73df1cef9061-kube-api-access-2hng2\") pod \"obo-prometheus-operator-7c8cf85677-8qwx2\" (UID: \"e4e8d27f-fbab-4c45-b182-73df1cef9061\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.950970 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-scssh"] Oct 01 05:39:53 crc kubenswrapper[4661]: I1001 05:39:53.958787 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996169 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996213 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996249 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7062884-4a27-4396-b3f5-698aceda68d2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996269 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996328 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:53.996350 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk7tp\" (UniqueName: \"kubernetes.io/projected/a7062884-4a27-4396-b3f5-698aceda68d2-kube-api-access-rk7tp\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.005118 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.005118 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-apiservice-cert\") pod 
\"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.008658 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f995c9b-5efa-4d54-be17-7d67186446c3-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5\" (UID: \"6f995c9b-5efa-4d54-be17-7d67186446c3\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.018050 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cb7f3b2f-e219-4709-85e3-8b1df4b288bd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf\" (UID: \"cb7f3b2f-e219-4709-85e3-8b1df4b288bd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.069817 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.089121 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-pnpg7"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.089949 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.090892 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.098008 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk7tp\" (UniqueName: \"kubernetes.io/projected/a7062884-4a27-4396-b3f5-698aceda68d2-kube-api-access-rk7tp\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.098047 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7062884-4a27-4396-b3f5-698aceda68d2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.099271 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-dm6fr" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.105572 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7062884-4a27-4396-b3f5-698aceda68d2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.116040 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-pnpg7"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.117812 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk7tp\" (UniqueName: \"kubernetes.io/projected/a7062884-4a27-4396-b3f5-698aceda68d2-kube-api-access-rk7tp\") pod \"observability-operator-cc5f78dfc-scssh\" (UID: \"a7062884-4a27-4396-b3f5-698aceda68d2\") " pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.194916 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.198986 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdk5x\" (UniqueName: \"kubernetes.io/projected/58ef1745-3b19-4508-b099-100418c1a6d7-kube-api-access-bdk5x\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.199053 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/58ef1745-3b19-4508-b099-100418c1a6d7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.300382 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdk5x\" (UniqueName: \"kubernetes.io/projected/58ef1745-3b19-4508-b099-100418c1a6d7-kube-api-access-bdk5x\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.300472 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/58ef1745-3b19-4508-b099-100418c1a6d7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.301266 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/58ef1745-3b19-4508-b099-100418c1a6d7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.324255 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.325553 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdk5x\" (UniqueName: \"kubernetes.io/projected/58ef1745-3b19-4508-b099-100418c1a6d7-kube-api-access-bdk5x\") pod \"perses-operator-54bc95c9fb-pnpg7\" (UID: \"58ef1745-3b19-4508-b099-100418c1a6d7\") " pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.439922 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.448861 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:39:54 crc kubenswrapper[4661]: W1001 05:39:54.449108 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4e8d27f_fbab_4c45_b182_73df1cef9061.slice/crio-2cbc74292c4e53fbd280cb65f864393f6bf6cdd4b74eb0462c4f8c820465cf27 WatchSource:0}: Error finding container 2cbc74292c4e53fbd280cb65f864393f6bf6cdd4b74eb0462c4f8c820465cf27: Status 404 returned error can't find the container with id 2cbc74292c4e53fbd280cb65f864393f6bf6cdd4b74eb0462c4f8c820465cf27 Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.484960 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-scssh"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.601185 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf"] Oct 01 05:39:54 crc kubenswrapper[4661]: W1001 05:39:54.605707 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb7f3b2f_e219_4709_85e3_8b1df4b288bd.slice/crio-2cbf9a72575fef04aac8e64a03d33572c96fe0d8bf406b4118ac4e12b6f33bd5 WatchSource:0}: Error finding container 2cbf9a72575fef04aac8e64a03d33572c96fe0d8bf406b4118ac4e12b6f33bd5: Status 404 returned error can't find the container with id 2cbf9a72575fef04aac8e64a03d33572c96fe0d8bf406b4118ac4e12b6f33bd5 Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.626309 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-pnpg7"] Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.945332 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" event={"ID":"a7062884-4a27-4396-b3f5-698aceda68d2","Type":"ContainerStarted","Data":"1bbc08292fc76da847b5ce250c4916e739871d3d2ce1fbbdbf7c99e6a0da6ac7"} Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.946203 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" event={"ID":"6f995c9b-5efa-4d54-be17-7d67186446c3","Type":"ContainerStarted","Data":"67a081fad9789237ad7ac873070da5eb950426d1249a4e7705ff34f2da8328d4"} Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.947038 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" event={"ID":"58ef1745-3b19-4508-b099-100418c1a6d7","Type":"ContainerStarted","Data":"1cb133277702c1dd277d8ca9afb7e6ea956397a8c250273b5c4ae4fbe1b0b483"} Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.947822 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" event={"ID":"cb7f3b2f-e219-4709-85e3-8b1df4b288bd","Type":"ContainerStarted","Data":"2cbf9a72575fef04aac8e64a03d33572c96fe0d8bf406b4118ac4e12b6f33bd5"} Oct 01 05:39:54 crc kubenswrapper[4661]: I1001 05:39:54.948570 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" event={"ID":"e4e8d27f-fbab-4c45-b182-73df1cef9061","Type":"ContainerStarted","Data":"2cbc74292c4e53fbd280cb65f864393f6bf6cdd4b74eb0462c4f8c820465cf27"} Oct 01 05:40:07 crc kubenswrapper[4661]: E1001 05:40:07.884282 4661 log.go:32] "PullImage from image service 
failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c" Oct 01 05:40:07 crc kubenswrapper[4661]: E1001 05:40:07.885116 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bdk5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-54bc95c9fb-pnpg7_openshift-operators(58ef1745-3b19-4508-b099-100418c1a6d7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:40:07 crc kubenswrapper[4661]: E1001 05:40:07.886327 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" podUID="58ef1745-3b19-4508-b099-100418c1a6d7" Oct 01 05:40:08 crc kubenswrapper[4661]: E1001 05:40:08.034473 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c\\\"\"" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" podUID="58ef1745-3b19-4508-b099-100418c1a6d7" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.039436 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" event={"ID":"a7062884-4a27-4396-b3f5-698aceda68d2","Type":"ContainerStarted","Data":"d6e5af0db832ca8fd420c7b43a3a74b186f69c5768d941091771d1c83be5b9d7"} Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.039738 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.042000 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" event={"ID":"6f995c9b-5efa-4d54-be17-7d67186446c3","Type":"ContainerStarted","Data":"9e0df7e000b39abb60f128e891a524e11db744d5e339f516b615227b1dc7811a"} Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.043756 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" event={"ID":"cb7f3b2f-e219-4709-85e3-8b1df4b288bd","Type":"ContainerStarted","Data":"d59654361f7f100c36261e1e3fbb37a2b0d75dc5fe3e665643894fea5625a2e1"} Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.045177 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" event={"ID":"e4e8d27f-fbab-4c45-b182-73df1cef9061","Type":"ContainerStarted","Data":"4bea9fca926e053cfcfc7884d45bb36777c035c0ae0d8a67877aa8a11f50dfa0"} Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.072604 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" podStartSLOduration=2.561241741 podStartE2EDuration="16.072586071s" podCreationTimestamp="2025-10-01 05:39:53 +0000 UTC" firstStartedPulling="2025-10-01 05:39:54.50099279 +0000 UTC m=+643.438971404" lastFinishedPulling="2025-10-01 05:40:08.01233706 +0000 UTC m=+656.950315734" observedRunningTime="2025-10-01 05:40:09.070672467 +0000 UTC m=+658.008651081" watchObservedRunningTime="2025-10-01 05:40:09.072586071 +0000 UTC m=+658.010564685" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.094861 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf" podStartSLOduration=2.701833166 podStartE2EDuration="16.094845161s" podCreationTimestamp="2025-10-01 05:39:53 +0000 UTC" firstStartedPulling="2025-10-01 05:39:54.611841434 +0000 UTC m=+643.549820048" lastFinishedPulling="2025-10-01 05:40:08.004853389 +0000 UTC m=+656.942832043" observedRunningTime="2025-10-01 05:40:09.093095071 +0000 UTC m=+658.031073685" watchObservedRunningTime="2025-10-01 05:40:09.094845161 +0000 UTC m=+658.032823775" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.120019 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-scssh" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.151217 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-8qwx2" 
podStartSLOduration=2.648142778 podStartE2EDuration="16.151197846s" podCreationTimestamp="2025-10-01 05:39:53 +0000 UTC" firstStartedPulling="2025-10-01 05:39:54.451210039 +0000 UTC m=+643.389188643" lastFinishedPulling="2025-10-01 05:40:07.954265097 +0000 UTC m=+656.892243711" observedRunningTime="2025-10-01 05:40:09.115800404 +0000 UTC m=+658.053779028" watchObservedRunningTime="2025-10-01 05:40:09.151197846 +0000 UTC m=+658.089176470" Oct 01 05:40:09 crc kubenswrapper[4661]: I1001 05:40:09.153996 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5" podStartSLOduration=2.531776539 podStartE2EDuration="16.153982895s" podCreationTimestamp="2025-10-01 05:39:53 +0000 UTC" firstStartedPulling="2025-10-01 05:39:54.348865467 +0000 UTC m=+643.286844081" lastFinishedPulling="2025-10-01 05:40:07.971071823 +0000 UTC m=+656.909050437" observedRunningTime="2025-10-01 05:40:09.149906429 +0000 UTC m=+658.087885113" watchObservedRunningTime="2025-10-01 05:40:09.153982895 +0000 UTC m=+658.091961519" Oct 01 05:40:20 crc kubenswrapper[4661]: I1001 05:40:20.115794 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" event={"ID":"58ef1745-3b19-4508-b099-100418c1a6d7","Type":"ContainerStarted","Data":"f17ba1bf92faaf88888a92d5f27728c0e11f62b152783c9d8972f767edc70088"} Oct 01 05:40:20 crc kubenswrapper[4661]: I1001 05:40:20.117623 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:40:20 crc kubenswrapper[4661]: I1001 05:40:20.141676 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" podStartSLOduration=1.176816277 podStartE2EDuration="26.141656471s" podCreationTimestamp="2025-10-01 05:39:54 +0000 UTC" firstStartedPulling="2025-10-01 05:39:54.633815967 +0000 UTC m=+643.571794581" lastFinishedPulling="2025-10-01 05:40:19.598656141 +0000 UTC m=+668.536634775" observedRunningTime="2025-10-01 05:40:20.141524437 +0000 UTC m=+669.079503051" watchObservedRunningTime="2025-10-01 05:40:20.141656471 +0000 UTC m=+669.079635085" Oct 01 05:40:24 crc kubenswrapper[4661]: I1001 05:40:24.452187 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-pnpg7" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.023125 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk"] Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.025371 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.027163 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.032323 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk"] Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.126557 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.126610 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.126748 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8mqq\" (UniqueName: \"kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.227701 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.227753 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.227799 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8mqq\" (UniqueName: \"kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.228723 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.228858 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.262820 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8mqq\" (UniqueName: \"kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.345027 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:43 crc kubenswrapper[4661]: I1001 05:40:43.612122 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk"] Oct 01 05:40:44 crc kubenswrapper[4661]: I1001 05:40:44.275752 4661 generic.go:334] "Generic (PLEG): container finished" podID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerID="69a0ff15b6f22b32f83c819dcf1b661b696fe77cccb4c78ab112c21a4d423cc9" exitCode=0 Oct 01 05:40:44 crc kubenswrapper[4661]: I1001 05:40:44.275832 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" event={"ID":"4a49ff2d-3e51-4e56-9062-58c965ab69a3","Type":"ContainerDied","Data":"69a0ff15b6f22b32f83c819dcf1b661b696fe77cccb4c78ab112c21a4d423cc9"} Oct 01 05:40:44 crc kubenswrapper[4661]: I1001 05:40:44.275903 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" event={"ID":"4a49ff2d-3e51-4e56-9062-58c965ab69a3","Type":"ContainerStarted","Data":"5c4c7af9c7c941c59246ed87df55b6779bf37dedc98d54a8f492d9b14cbbc909"} Oct 01 05:40:46 crc kubenswrapper[4661]: I1001 05:40:46.291301 4661 generic.go:334] "Generic (PLEG): container finished" podID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerID="94feefa7ec149a953e0fbb25066c3aba34e5469bd844342fb69ac7bdfcb45b45" exitCode=0 Oct 01 05:40:46 crc kubenswrapper[4661]: I1001 05:40:46.291407 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" event={"ID":"4a49ff2d-3e51-4e56-9062-58c965ab69a3","Type":"ContainerDied","Data":"94feefa7ec149a953e0fbb25066c3aba34e5469bd844342fb69ac7bdfcb45b45"} Oct 01 05:40:47 crc kubenswrapper[4661]: I1001 05:40:47.303498 4661 generic.go:334] "Generic (PLEG): container finished" podID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerID="3f164b631ece4497915278e5bf6f94898b043ef8d679ccbfffde99a5021e873e" exitCode=0 Oct 01 05:40:47 crc kubenswrapper[4661]: I1001 
05:40:47.303701 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" event={"ID":"4a49ff2d-3e51-4e56-9062-58c965ab69a3","Type":"ContainerDied","Data":"3f164b631ece4497915278e5bf6f94898b043ef8d679ccbfffde99a5021e873e"} Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.585614 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.597118 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util\") pod \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.597207 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle\") pod \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.597294 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8mqq\" (UniqueName: \"kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq\") pod \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\" (UID: \"4a49ff2d-3e51-4e56-9062-58c965ab69a3\") " Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.597989 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle" (OuterVolumeSpecName: "bundle") pod "4a49ff2d-3e51-4e56-9062-58c965ab69a3" (UID: "4a49ff2d-3e51-4e56-9062-58c965ab69a3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.603933 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq" (OuterVolumeSpecName: "kube-api-access-z8mqq") pod "4a49ff2d-3e51-4e56-9062-58c965ab69a3" (UID: "4a49ff2d-3e51-4e56-9062-58c965ab69a3"). InnerVolumeSpecName "kube-api-access-z8mqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.621351 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util" (OuterVolumeSpecName: "util") pod "4a49ff2d-3e51-4e56-9062-58c965ab69a3" (UID: "4a49ff2d-3e51-4e56-9062-58c965ab69a3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.698211 4661 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-util\") on node \"crc\" DevicePath \"\"" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.698243 4661 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a49ff2d-3e51-4e56-9062-58c965ab69a3-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:40:48 crc kubenswrapper[4661]: I1001 05:40:48.698252 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8mqq\" (UniqueName: \"kubernetes.io/projected/4a49ff2d-3e51-4e56-9062-58c965ab69a3-kube-api-access-z8mqq\") on node \"crc\" DevicePath \"\"" Oct 01 05:40:49 crc kubenswrapper[4661]: I1001 05:40:49.321398 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" event={"ID":"4a49ff2d-3e51-4e56-9062-58c965ab69a3","Type":"ContainerDied","Data":"5c4c7af9c7c941c59246ed87df55b6779bf37dedc98d54a8f492d9b14cbbc909"} Oct 01 05:40:49 crc kubenswrapper[4661]: I1001 05:40:49.321452 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c4c7af9c7c941c59246ed87df55b6779bf37dedc98d54a8f492d9b14cbbc909" Oct 01 05:40:49 crc kubenswrapper[4661]: I1001 05:40:49.321533 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.023409 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp"] Oct 01 05:40:52 crc kubenswrapper[4661]: E1001 05:40:52.023855 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="pull" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.023866 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="pull" Oct 01 05:40:52 crc kubenswrapper[4661]: E1001 05:40:52.023879 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="util" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.023885 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="util" Oct 01 05:40:52 crc kubenswrapper[4661]: E1001 05:40:52.023895 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="extract" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.023901 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="extract" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.023990 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a49ff2d-3e51-4e56-9062-58c965ab69a3" containerName="extract" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.024359 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.027258 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.027803 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-2mlb5" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.028293 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.044204 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp"] Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.046715 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbkr5\" (UniqueName: \"kubernetes.io/projected/185fc7e0-5985-4401-9b50-60a661708075-kube-api-access-sbkr5\") pod \"nmstate-operator-5d6f6cfd66-6gctp\" (UID: \"185fc7e0-5985-4401-9b50-60a661708075\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.147881 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbkr5\" (UniqueName: \"kubernetes.io/projected/185fc7e0-5985-4401-9b50-60a661708075-kube-api-access-sbkr5\") pod \"nmstate-operator-5d6f6cfd66-6gctp\" (UID: \"185fc7e0-5985-4401-9b50-60a661708075\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.190028 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbkr5\" (UniqueName: \"kubernetes.io/projected/185fc7e0-5985-4401-9b50-60a661708075-kube-api-access-sbkr5\") pod \"nmstate-operator-5d6f6cfd66-6gctp\" (UID: \"185fc7e0-5985-4401-9b50-60a661708075\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.342356 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" Oct 01 05:40:52 crc kubenswrapper[4661]: I1001 05:40:52.857481 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp"] Oct 01 05:40:53 crc kubenswrapper[4661]: I1001 05:40:53.346125 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" event={"ID":"185fc7e0-5985-4401-9b50-60a661708075","Type":"ContainerStarted","Data":"c68d398ca03884684746783615308ce863621f9f20ac6681d4f1cef50c253c8f"} Oct 01 05:40:55 crc kubenswrapper[4661]: I1001 05:40:55.359115 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" event={"ID":"185fc7e0-5985-4401-9b50-60a661708075","Type":"ContainerStarted","Data":"62c8df3a916f649c68c81c59b257d46de0996f203fc2c18efee8776f2acc9e3b"} Oct 01 05:40:55 crc kubenswrapper[4661]: I1001 05:40:55.382822 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6gctp" podStartSLOduration=1.332306829 podStartE2EDuration="3.382803468s" podCreationTimestamp="2025-10-01 05:40:52 +0000 UTC" firstStartedPulling="2025-10-01 05:40:52.872931136 +0000 UTC m=+701.810909760" lastFinishedPulling="2025-10-01 05:40:54.923427785 +0000 UTC m=+703.861406399" observedRunningTime="2025-10-01 05:40:55.380132482 +0000 UTC m=+704.318111106" watchObservedRunningTime="2025-10-01 05:40:55.382803468 +0000 UTC m=+704.320782082" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.498273 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.500048 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.502737 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.503399 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.504520 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.504882 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ndzhl" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.529818 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.543297 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-jp5md"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.544131 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.553516 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.613848 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-nmstate-lock\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.613889 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh6kz\" (UniqueName: \"kubernetes.io/projected/b4aa2a05-edba-4a48-a854-8c05535af455-kube-api-access-fh6kz\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.613937 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-ovs-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.613975 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-dbus-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.613999 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhjdw\" (UniqueName: \"kubernetes.io/projected/06d34118-bb4a-4b6f-9637-2fdac6465088-kube-api-access-fhjdw\") pod \"nmstate-metrics-58fcddf996-9nbcr\" (UID: \"06d34118-bb4a-4b6f-9637-2fdac6465088\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.614024 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zthjm\" (UniqueName: \"kubernetes.io/projected/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-kube-api-access-zthjm\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.614043 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b4aa2a05-edba-4a48-a854-8c05535af455-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.651933 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.652552 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.655856 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-6l64j" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.657273 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.657956 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.663055 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715243 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs9sw\" (UniqueName: \"kubernetes.io/projected/db1944d4-6048-4e0c-86fc-3f37d4a653bc-kube-api-access-bs9sw\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715288 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-nmstate-lock\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715317 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh6kz\" (UniqueName: \"kubernetes.io/projected/b4aa2a05-edba-4a48-a854-8c05535af455-kube-api-access-fh6kz\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715340 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/db1944d4-6048-4e0c-86fc-3f37d4a653bc-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715367 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-ovs-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715390 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-dbus-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715391 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-nmstate-lock\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " 
pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715422 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhjdw\" (UniqueName: \"kubernetes.io/projected/06d34118-bb4a-4b6f-9637-2fdac6465088-kube-api-access-fhjdw\") pod \"nmstate-metrics-58fcddf996-9nbcr\" (UID: \"06d34118-bb4a-4b6f-9637-2fdac6465088\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715436 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-ovs-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715448 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zthjm\" (UniqueName: \"kubernetes.io/projected/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-kube-api-access-zthjm\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715470 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b4aa2a05-edba-4a48-a854-8c05535af455-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715501 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.715741 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-dbus-socket\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.720431 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b4aa2a05-edba-4a48-a854-8c05535af455-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.732454 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zthjm\" (UniqueName: \"kubernetes.io/projected/49f9f7ff-ea7a-42c4-ad95-e8a05841ab36-kube-api-access-zthjm\") pod \"nmstate-handler-jp5md\" (UID: \"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36\") " pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.732494 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhjdw\" (UniqueName: \"kubernetes.io/projected/06d34118-bb4a-4b6f-9637-2fdac6465088-kube-api-access-fhjdw\") pod \"nmstate-metrics-58fcddf996-9nbcr\" (UID: 
\"06d34118-bb4a-4b6f-9637-2fdac6465088\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.733447 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh6kz\" (UniqueName: \"kubernetes.io/projected/b4aa2a05-edba-4a48-a854-8c05535af455-kube-api-access-fh6kz\") pod \"nmstate-webhook-6d689559c5-qkzcj\" (UID: \"b4aa2a05-edba-4a48-a854-8c05535af455\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.816396 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.816669 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.816698 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-68f4ff5c48-592xm"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.816718 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs9sw\" (UniqueName: \"kubernetes.io/projected/db1944d4-6048-4e0c-86fc-3f37d4a653bc-kube-api-access-bs9sw\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.816742 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/db1944d4-6048-4e0c-86fc-3f37d4a653bc-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: E1001 05:40:56.816921 4661 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 01 05:40:56 crc kubenswrapper[4661]: E1001 05:40:56.817016 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert podName:db1944d4-6048-4e0c-86fc-3f37d4a653bc nodeName:}" failed. No retries permitted until 2025-10-01 05:40:57.316995033 +0000 UTC m=+706.254973667 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-xpx7n" (UID: "db1944d4-6048-4e0c-86fc-3f37d4a653bc") : secret "plugin-serving-cert" not found Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.817309 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.826308 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/db1944d4-6048-4e0c-86fc-3f37d4a653bc-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.826883 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.837199 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-68f4ff5c48-592xm"] Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.840783 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs9sw\" (UniqueName: \"kubernetes.io/projected/db1944d4-6048-4e0c-86fc-3f37d4a653bc-kube-api-access-bs9sw\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.857657 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917307 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk2zv\" (UniqueName: \"kubernetes.io/projected/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-kube-api-access-rk2zv\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917363 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-service-ca\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917394 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-oauth-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917411 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917428 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-oauth-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: 
I1001 05:40:56.917447 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:56 crc kubenswrapper[4661]: I1001 05:40:56.917530 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-trusted-ca-bundle\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018403 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-trusted-ca-bundle\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018495 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk2zv\" (UniqueName: \"kubernetes.io/projected/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-kube-api-access-rk2zv\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018537 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-service-ca\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018578 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-oauth-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018602 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018643 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-oauth-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.018707 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 
01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.019977 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-service-ca\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.020059 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-trusted-ca-bundle\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.020624 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-oauth-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.020928 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.023990 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-serving-cert\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.024344 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-console-oauth-config\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.042279 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk2zv\" (UniqueName: \"kubernetes.io/projected/a978eb56-bdfe-4af0-a3cf-ce2dc12f441a-kube-api-access-rk2zv\") pod \"console-68f4ff5c48-592xm\" (UID: \"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a\") " pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.099254 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj"] Oct 01 05:40:57 crc kubenswrapper[4661]: W1001 05:40:57.103494 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4aa2a05_edba_4a48_a854_8c05535af455.slice/crio-41d9958d063d3a53c8b4fc6906adee8d9b88369ea22d78fe7938a6be41428441 WatchSource:0}: Error finding container 41d9958d063d3a53c8b4fc6906adee8d9b88369ea22d78fe7938a6be41428441: Status 404 returned error can't find the container with id 41d9958d063d3a53c8b4fc6906adee8d9b88369ea22d78fe7938a6be41428441 Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.212066 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.248056 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr"] Oct 01 05:40:57 crc kubenswrapper[4661]: W1001 05:40:57.257877 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06d34118_bb4a_4b6f_9637_2fdac6465088.slice/crio-caaf15d8b12ce2c1ebc2ec828add5fb872995bffb42dc2b67ad36d7b535f0bf9 WatchSource:0}: Error finding container caaf15d8b12ce2c1ebc2ec828add5fb872995bffb42dc2b67ad36d7b535f0bf9: Status 404 returned error can't find the container with id caaf15d8b12ce2c1ebc2ec828add5fb872995bffb42dc2b67ad36d7b535f0bf9 Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.323215 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.327042 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/db1944d4-6048-4e0c-86fc-3f37d4a653bc-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-xpx7n\" (UID: \"db1944d4-6048-4e0c-86fc-3f37d4a653bc\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.374966 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jp5md" event={"ID":"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36","Type":"ContainerStarted","Data":"d47d3a68486c3690f6324cc212e55aa07b105b09166821cb9e38d624c0e9426a"} Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.376860 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" event={"ID":"06d34118-bb4a-4b6f-9637-2fdac6465088","Type":"ContainerStarted","Data":"caaf15d8b12ce2c1ebc2ec828add5fb872995bffb42dc2b67ad36d7b535f0bf9"} Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.381438 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" event={"ID":"b4aa2a05-edba-4a48-a854-8c05535af455","Type":"ContainerStarted","Data":"41d9958d063d3a53c8b4fc6906adee8d9b88369ea22d78fe7938a6be41428441"} Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.452763 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-68f4ff5c48-592xm"] Oct 01 05:40:57 crc kubenswrapper[4661]: W1001 05:40:57.461548 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda978eb56_bdfe_4af0_a3cf_ce2dc12f441a.slice/crio-cd35ab62507cb04771a88f5afacae747e0e1c1034e429ecc6a6efeffd22e50e8 WatchSource:0}: Error finding container cd35ab62507cb04771a88f5afacae747e0e1c1034e429ecc6a6efeffd22e50e8: Status 404 returned error can't find the container with id cd35ab62507cb04771a88f5afacae747e0e1c1034e429ecc6a6efeffd22e50e8 Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.564088 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" Oct 01 05:40:57 crc kubenswrapper[4661]: I1001 05:40:57.837391 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n"] Oct 01 05:40:57 crc kubenswrapper[4661]: W1001 05:40:57.846747 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb1944d4_6048_4e0c_86fc_3f37d4a653bc.slice/crio-cce0002e1a4bc39d17c0b068e075a86c6ac15f78000e47650ab4e422bea3c99f WatchSource:0}: Error finding container cce0002e1a4bc39d17c0b068e075a86c6ac15f78000e47650ab4e422bea3c99f: Status 404 returned error can't find the container with id cce0002e1a4bc39d17c0b068e075a86c6ac15f78000e47650ab4e422bea3c99f Oct 01 05:40:58 crc kubenswrapper[4661]: I1001 05:40:58.388168 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-68f4ff5c48-592xm" event={"ID":"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a","Type":"ContainerStarted","Data":"22ebe5d285fb0545c7b2a96d9e248a067f67980347bbec3fdd23f712c385f51f"} Oct 01 05:40:58 crc kubenswrapper[4661]: I1001 05:40:58.388217 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-68f4ff5c48-592xm" event={"ID":"a978eb56-bdfe-4af0-a3cf-ce2dc12f441a","Type":"ContainerStarted","Data":"cd35ab62507cb04771a88f5afacae747e0e1c1034e429ecc6a6efeffd22e50e8"} Oct 01 05:40:58 crc kubenswrapper[4661]: I1001 05:40:58.392838 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" event={"ID":"db1944d4-6048-4e0c-86fc-3f37d4a653bc","Type":"ContainerStarted","Data":"cce0002e1a4bc39d17c0b068e075a86c6ac15f78000e47650ab4e422bea3c99f"} Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.423952 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jp5md" event={"ID":"49f9f7ff-ea7a-42c4-ad95-e8a05841ab36","Type":"ContainerStarted","Data":"b01b61beb601a9bfcc95f7e631b87874f01744fcdeb688b8fc09a854ccf9b677"} Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.425765 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.434952 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" event={"ID":"06d34118-bb4a-4b6f-9637-2fdac6465088","Type":"ContainerStarted","Data":"e165df2b2e8455bf3e271cf1adf661211473339f040729eca47c99b814afa796"} Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.448611 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-68f4ff5c48-592xm" podStartSLOduration=5.448590549 podStartE2EDuration="5.448590549s" podCreationTimestamp="2025-10-01 05:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:40:58.410384793 +0000 UTC m=+707.348363427" watchObservedRunningTime="2025-10-01 05:41:01.448590549 +0000 UTC m=+710.386569163" Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.450869 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" event={"ID":"db1944d4-6048-4e0c-86fc-3f37d4a653bc","Type":"ContainerStarted","Data":"23d468383ac7ca29067af1ebb870dccc0c4c206c233269eebbd178adf86dd2ba"} Oct 01 05:41:01 crc kubenswrapper[4661]: 
I1001 05:41:01.457969 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" event={"ID":"b4aa2a05-edba-4a48-a854-8c05535af455","Type":"ContainerStarted","Data":"111f8f0ba5caa247ef3ecc79bb5d7d89ff6071e11de3d2d78f437211c341c9df"} Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.459022 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.469523 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-jp5md" podStartSLOduration=2.086214218 podStartE2EDuration="5.469500761s" podCreationTimestamp="2025-10-01 05:40:56 +0000 UTC" firstStartedPulling="2025-10-01 05:40:56.891549463 +0000 UTC m=+705.829528077" lastFinishedPulling="2025-10-01 05:41:00.274835976 +0000 UTC m=+709.212814620" observedRunningTime="2025-10-01 05:41:01.449140374 +0000 UTC m=+710.387118998" watchObservedRunningTime="2025-10-01 05:41:01.469500761 +0000 UTC m=+710.407479375" Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.471963 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-xpx7n" podStartSLOduration=3.057546672 podStartE2EDuration="5.471946691s" podCreationTimestamp="2025-10-01 05:40:56 +0000 UTC" firstStartedPulling="2025-10-01 05:40:57.849986822 +0000 UTC m=+706.787965436" lastFinishedPulling="2025-10-01 05:41:00.264386821 +0000 UTC m=+709.202365455" observedRunningTime="2025-10-01 05:41:01.47090037 +0000 UTC m=+710.408878984" watchObservedRunningTime="2025-10-01 05:41:01.471946691 +0000 UTC m=+710.409925315" Oct 01 05:41:01 crc kubenswrapper[4661]: I1001 05:41:01.797422 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" podStartSLOduration=2.606621547 podStartE2EDuration="5.797397082s" podCreationTimestamp="2025-10-01 05:40:56 +0000 UTC" firstStartedPulling="2025-10-01 05:40:57.105383255 +0000 UTC m=+706.043361869" lastFinishedPulling="2025-10-01 05:41:00.29615879 +0000 UTC m=+709.234137404" observedRunningTime="2025-10-01 05:41:01.505346975 +0000 UTC m=+710.443325599" watchObservedRunningTime="2025-10-01 05:41:01.797397082 +0000 UTC m=+710.735375726" Oct 01 05:41:03 crc kubenswrapper[4661]: I1001 05:41:03.476625 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" event={"ID":"06d34118-bb4a-4b6f-9637-2fdac6465088","Type":"ContainerStarted","Data":"f7df858a29743541ac151bad08f502a77ea7d99b010783f9ad5670e3aed4b5c1"} Oct 01 05:41:03 crc kubenswrapper[4661]: I1001 05:41:03.509305 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-9nbcr" podStartSLOduration=1.836221472 podStartE2EDuration="7.509271877s" podCreationTimestamp="2025-10-01 05:40:56 +0000 UTC" firstStartedPulling="2025-10-01 05:40:57.263239253 +0000 UTC m=+706.201217867" lastFinishedPulling="2025-10-01 05:41:02.936289618 +0000 UTC m=+711.874268272" observedRunningTime="2025-10-01 05:41:03.504526102 +0000 UTC m=+712.442504766" watchObservedRunningTime="2025-10-01 05:41:03.509271877 +0000 UTC m=+712.447250541" Oct 01 05:41:04 crc kubenswrapper[4661]: I1001 05:41:04.310134 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness 
probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:41:04 crc kubenswrapper[4661]: I1001 05:41:04.310594 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:41:06 crc kubenswrapper[4661]: I1001 05:41:06.894105 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-jp5md" Oct 01 05:41:07 crc kubenswrapper[4661]: I1001 05:41:07.213038 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:41:07 crc kubenswrapper[4661]: I1001 05:41:07.213138 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:41:07 crc kubenswrapper[4661]: I1001 05:41:07.221044 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:41:07 crc kubenswrapper[4661]: I1001 05:41:07.512272 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-68f4ff5c48-592xm" Oct 01 05:41:07 crc kubenswrapper[4661]: I1001 05:41:07.587577 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:41:16 crc kubenswrapper[4661]: I1001 05:41:16.836370 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qkzcj" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.467487 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr"] Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.469440 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.470917 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.488469 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr"] Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.551269 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wdsx\" (UniqueName: \"kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.551369 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.551406 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.652626 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.652763 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.652870 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wdsx\" (UniqueName: \"kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.653496 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.653532 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.659020 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-bnbps" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" containerName="console" containerID="cri-o://6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6" gracePeriod=15 Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.694283 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wdsx\" (UniqueName: \"kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:32 crc kubenswrapper[4661]: I1001 05:41:32.790267 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.113048 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bnbps_591acb44-aa58-4103-98f7-b68e067bc90d/console/0.log" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.113190 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.258998 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.259162 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.260707 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.260851 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.261454 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.261843 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca" (OuterVolumeSpecName: "service-ca") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.262085 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.262138 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kql9\" (UniqueName: \"kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.262792 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config" (OuterVolumeSpecName: "console-config") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.263031 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle\") pod \"591acb44-aa58-4103-98f7-b68e067bc90d\" (UID: \"591acb44-aa58-4103-98f7-b68e067bc90d\") " Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.263492 4661 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.263530 4661 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.263548 4661 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.263835 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.266730 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.268013 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.268758 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9" (OuterVolumeSpecName: "kube-api-access-2kql9") pod "591acb44-aa58-4103-98f7-b68e067bc90d" (UID: "591acb44-aa58-4103-98f7-b68e067bc90d"). InnerVolumeSpecName "kube-api-access-2kql9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.301973 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr"] Oct 01 05:41:33 crc kubenswrapper[4661]: W1001 05:41:33.313332 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7850164_3b1e_4bd1_b8c8_691c54963d36.slice/crio-c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6 WatchSource:0}: Error finding container c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6: Status 404 returned error can't find the container with id c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6 Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.365296 4661 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.365346 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kql9\" (UniqueName: \"kubernetes.io/projected/591acb44-aa58-4103-98f7-b68e067bc90d-kube-api-access-2kql9\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.365368 4661 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/591acb44-aa58-4103-98f7-b68e067bc90d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.365386 4661 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/591acb44-aa58-4103-98f7-b68e067bc90d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.708463 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bnbps_591acb44-aa58-4103-98f7-b68e067bc90d/console/0.log" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.708705 4661 generic.go:334] "Generic (PLEG): container finished" podID="591acb44-aa58-4103-98f7-b68e067bc90d" containerID="6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6" exitCode=2 Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.708781 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bnbps" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.709321 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bnbps" event={"ID":"591acb44-aa58-4103-98f7-b68e067bc90d","Type":"ContainerDied","Data":"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6"} Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.709406 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bnbps" event={"ID":"591acb44-aa58-4103-98f7-b68e067bc90d","Type":"ContainerDied","Data":"a33b4c263a20f7a921d44ec94309d1461c0200aac833c6768045602466f890d8"} Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.709432 4661 scope.go:117] "RemoveContainer" containerID="6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.711291 4661 generic.go:334] "Generic (PLEG): container finished" podID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerID="407e42a7c45cf4090b66db0bded3ee0dbcd3946dfb4205dffeffcaa5f71d0c5e" exitCode=0 Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.711331 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" event={"ID":"d7850164-3b1e-4bd1-b8c8-691c54963d36","Type":"ContainerDied","Data":"407e42a7c45cf4090b66db0bded3ee0dbcd3946dfb4205dffeffcaa5f71d0c5e"} Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.711375 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" event={"ID":"d7850164-3b1e-4bd1-b8c8-691c54963d36","Type":"ContainerStarted","Data":"c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6"} Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.728520 4661 scope.go:117] "RemoveContainer" containerID="6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6" Oct 01 05:41:33 crc kubenswrapper[4661]: E1001 05:41:33.728984 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6\": container with ID starting with 6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6 not found: ID does not exist" containerID="6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.729027 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6"} err="failed to get container status \"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6\": rpc error: code = NotFound desc = could not find container \"6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6\": container with ID starting with 6e81c435e54bd56cd782cb6b7531a55595c06f5651e8f6cf858c10cd8a68dfc6 not found: ID does not exist" Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.743379 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.748072 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-bnbps"] Oct 01 05:41:33 crc kubenswrapper[4661]: I1001 05:41:33.762646 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" path="/var/lib/kubelet/pods/591acb44-aa58-4103-98f7-b68e067bc90d/volumes" Oct 01 05:41:34 crc kubenswrapper[4661]: I1001 05:41:34.309105 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:41:34 crc kubenswrapper[4661]: I1001 05:41:34.309207 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:41:35 crc kubenswrapper[4661]: I1001 05:41:35.731721 4661 generic.go:334] "Generic (PLEG): container finished" podID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerID="cc0a7a3de76da8e6bbd9af5836d61672491388fead801c7291aec7cac1b9e69a" exitCode=0 Oct 01 05:41:35 crc kubenswrapper[4661]: I1001 05:41:35.731811 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" event={"ID":"d7850164-3b1e-4bd1-b8c8-691c54963d36","Type":"ContainerDied","Data":"cc0a7a3de76da8e6bbd9af5836d61672491388fead801c7291aec7cac1b9e69a"} Oct 01 05:41:36 crc kubenswrapper[4661]: I1001 05:41:36.742334 4661 generic.go:334] "Generic (PLEG): container finished" podID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerID="55f250bd0a502bdc7d5b7474587790decb7e3c0309fbd5196b9641aa36ed4338" exitCode=0 Oct 01 05:41:36 crc kubenswrapper[4661]: I1001 05:41:36.742796 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" event={"ID":"d7850164-3b1e-4bd1-b8c8-691c54963d36","Type":"ContainerDied","Data":"55f250bd0a502bdc7d5b7474587790decb7e3c0309fbd5196b9641aa36ed4338"} Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.067805 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.233407 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wdsx\" (UniqueName: \"kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx\") pod \"d7850164-3b1e-4bd1-b8c8-691c54963d36\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.233484 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util\") pod \"d7850164-3b1e-4bd1-b8c8-691c54963d36\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.233571 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle\") pod \"d7850164-3b1e-4bd1-b8c8-691c54963d36\" (UID: \"d7850164-3b1e-4bd1-b8c8-691c54963d36\") " Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.234533 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle" (OuterVolumeSpecName: "bundle") pod "d7850164-3b1e-4bd1-b8c8-691c54963d36" (UID: "d7850164-3b1e-4bd1-b8c8-691c54963d36"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.242508 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx" (OuterVolumeSpecName: "kube-api-access-8wdsx") pod "d7850164-3b1e-4bd1-b8c8-691c54963d36" (UID: "d7850164-3b1e-4bd1-b8c8-691c54963d36"). InnerVolumeSpecName "kube-api-access-8wdsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.250986 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util" (OuterVolumeSpecName: "util") pod "d7850164-3b1e-4bd1-b8c8-691c54963d36" (UID: "d7850164-3b1e-4bd1-b8c8-691c54963d36"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.335193 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wdsx\" (UniqueName: \"kubernetes.io/projected/d7850164-3b1e-4bd1-b8c8-691c54963d36-kube-api-access-8wdsx\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.335244 4661 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-util\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.335265 4661 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7850164-3b1e-4bd1-b8c8-691c54963d36-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.755724 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" event={"ID":"d7850164-3b1e-4bd1-b8c8-691c54963d36","Type":"ContainerDied","Data":"c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6"} Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.755932 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c179494c56a43089d25d0540dc9bce3dd35f38ed8d2c9705a46338bcf57e5fc6" Oct 01 05:41:38 crc kubenswrapper[4661]: I1001 05:41:38.755793 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr" Oct 01 05:41:39 crc kubenswrapper[4661]: I1001 05:41:39.911738 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:41:39 crc kubenswrapper[4661]: I1001 05:41:39.912364 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" containerID="cri-o://f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3" gracePeriod=30 Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.003105 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"] Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.003304 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerName="route-controller-manager" containerID="cri-o://85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed" gracePeriod=30 Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.278150 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.339132 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.379678 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config\") pod \"5731cb34-0698-48be-a662-79dd89e808b2\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.379726 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert\") pod \"5731cb34-0698-48be-a662-79dd89e808b2\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.379809 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pr5d\" (UniqueName: \"kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d\") pod \"5731cb34-0698-48be-a662-79dd89e808b2\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.379832 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca\") pod \"5731cb34-0698-48be-a662-79dd89e808b2\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.379856 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles\") pod \"5731cb34-0698-48be-a662-79dd89e808b2\" (UID: \"5731cb34-0698-48be-a662-79dd89e808b2\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.380575 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca" (OuterVolumeSpecName: "client-ca") pod "5731cb34-0698-48be-a662-79dd89e808b2" (UID: "5731cb34-0698-48be-a662-79dd89e808b2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.380672 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5731cb34-0698-48be-a662-79dd89e808b2" (UID: "5731cb34-0698-48be-a662-79dd89e808b2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.381017 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config" (OuterVolumeSpecName: "config") pod "5731cb34-0698-48be-a662-79dd89e808b2" (UID: "5731cb34-0698-48be-a662-79dd89e808b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.384835 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5731cb34-0698-48be-a662-79dd89e808b2" (UID: "5731cb34-0698-48be-a662-79dd89e808b2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.384919 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d" (OuterVolumeSpecName: "kube-api-access-2pr5d") pod "5731cb34-0698-48be-a662-79dd89e808b2" (UID: "5731cb34-0698-48be-a662-79dd89e808b2"). InnerVolumeSpecName "kube-api-access-2pr5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.481204 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq\") pod \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.481343 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca\") pod \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.481416 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert\") pod \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.481502 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config\") pod \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\" (UID: \"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b\") " Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482162 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pr5d\" (UniqueName: \"kubernetes.io/projected/5731cb34-0698-48be-a662-79dd89e808b2-kube-api-access-2pr5d\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482205 4661 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482227 4661 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482247 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5731cb34-0698-48be-a662-79dd89e808b2-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482265 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5731cb34-0698-48be-a662-79dd89e808b2-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.482730 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca" (OuterVolumeSpecName: "client-ca") pod "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" (UID: 
"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.483170 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config" (OuterVolumeSpecName: "config") pod "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" (UID: "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.484211 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" (UID: "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.484304 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq" (OuterVolumeSpecName: "kube-api-access-tgslq") pod "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" (UID: "50a192ea-ce64-4d8c-b3e3-a19ef658aa2b"). InnerVolumeSpecName "kube-api-access-tgslq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.582927 4661 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.582967 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.582982 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-kube-api-access-tgslq\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.582996 4661 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.780146 4661 generic.go:334] "Generic (PLEG): container finished" podID="5731cb34-0698-48be-a662-79dd89e808b2" containerID="f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3" exitCode=0 Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.780248 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" event={"ID":"5731cb34-0698-48be-a662-79dd89e808b2","Type":"ContainerDied","Data":"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3"} Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.780276 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" event={"ID":"5731cb34-0698-48be-a662-79dd89e808b2","Type":"ContainerDied","Data":"85e1aa15c7522ddc2c516c7096cccb155410f5ae72b8d716d33f2d315fe5bdda"} Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.780298 4661 scope.go:117] "RemoveContainer" 
containerID="f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.780424 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mqx4x" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.787496 4661 generic.go:334] "Generic (PLEG): container finished" podID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerID="85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed" exitCode=0 Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.787545 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" event={"ID":"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b","Type":"ContainerDied","Data":"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed"} Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.787602 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" event={"ID":"50a192ea-ce64-4d8c-b3e3-a19ef658aa2b","Type":"ContainerDied","Data":"c8a9c7be568c3dab9bc9bbbf8e74b5f147d711e01b1ae3c06cf499fddebf9a1f"} Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.787740 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.818814 4661 scope.go:117] "RemoveContainer" containerID="f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3" Oct 01 05:41:40 crc kubenswrapper[4661]: E1001 05:41:40.819341 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3\": container with ID starting with f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3 not found: ID does not exist" containerID="f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.819439 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3"} err="failed to get container status \"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3\": rpc error: code = NotFound desc = could not find container \"f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3\": container with ID starting with f955e8e998da0ab0044056abe42e91cabb7b9e6c666c1cc908bffaad94ddaca3 not found: ID does not exist" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.819525 4661 scope.go:117] "RemoveContainer" containerID="85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.826710 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.832761 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mqx4x"] Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.837369 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"] Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.837843 4661 
scope.go:117] "RemoveContainer" containerID="85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed" Oct 01 05:41:40 crc kubenswrapper[4661]: E1001 05:41:40.838575 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed\": container with ID starting with 85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed not found: ID does not exist" containerID="85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.838626 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed"} err="failed to get container status \"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed\": rpc error: code = NotFound desc = could not find container \"85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed\": container with ID starting with 85943220f86eef84f22c7dfd1445274e2600a4ae5f846b551ffd447d796fffed not found: ID does not exist" Oct 01 05:41:40 crc kubenswrapper[4661]: I1001 05:41:40.843611 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dbmt2"] Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.500306 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"] Oct 01 05:41:41 crc kubenswrapper[4661]: E1001 05:41:41.501823 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.501913 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager" Oct 01 05:41:41 crc kubenswrapper[4661]: E1001 05:41:41.501991 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="util" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502058 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="util" Oct 01 05:41:41 crc kubenswrapper[4661]: E1001 05:41:41.502151 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerName="route-controller-manager" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502247 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerName="route-controller-manager" Oct 01 05:41:41 crc kubenswrapper[4661]: E1001 05:41:41.502325 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" containerName="console" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502391 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" containerName="console" Oct 01 05:41:41 crc kubenswrapper[4661]: E1001 05:41:41.502456 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="extract" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502520 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="extract" Oct 01 05:41:41 crc 
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502680 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="pull"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502856 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" containerName="route-controller-manager"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.502931 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5731cb34-0698-48be-a662-79dd89e808b2" containerName="controller-manager"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.503008 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7850164-3b1e-4bd1-b8c8-691c54963d36" containerName="extract"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.503079 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="591acb44-aa58-4103-98f7-b68e067bc90d" containerName="console"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.503601 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.512027 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"]
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.512373 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.512916 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.512924 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.513151 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.515263 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.518352 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.696355 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-serving-cert\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"
Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.696412 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-config\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"
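[editor's note] The "Caches populated" lines are client-go reflectors doing an initial List+Watch for each ConfigMap and Secret the new pod references, so volume setup can read them from a local cache. A minimal sketch of waiting for such a cache to sync with a shared informer (assumes k8s.io/client-go; the kubeconfig path is a placeholder):

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(cfg)

	// One shared informer per resource type; its reflector lists then
	// watches, which is the moment "Caches populated" is logged.
	factory := informers.NewSharedInformerFactoryWithOptions(
		clientset, 30*time.Second,
		informers.WithNamespace("openshift-route-controller-manager"))
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("ConfigMap cache populated")
}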
pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.696457 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-client-ca\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.696485 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg8gw\" (UniqueName: \"kubernetes.io/projected/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-kube-api-access-mg8gw\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.772048 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50a192ea-ce64-4d8c-b3e3-a19ef658aa2b" path="/var/lib/kubelet/pods/50a192ea-ce64-4d8c-b3e3-a19ef658aa2b/volumes" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.772770 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5731cb34-0698-48be-a662-79dd89e808b2" path="/var/lib/kubelet/pods/5731cb34-0698-48be-a662-79dd89e808b2/volumes" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.797527 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-serving-cert\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.797606 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-config\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.797701 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-client-ca\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.797745 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg8gw\" (UniqueName: \"kubernetes.io/projected/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-kube-api-access-mg8gw\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.801551 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-client-ca\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: 
\"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.802020 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-config\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.809994 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-serving-cert\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.837018 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg8gw\" (UniqueName: \"kubernetes.io/projected/f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f-kube-api-access-mg8gw\") pod \"route-controller-manager-d5c6f696b-p9cvk\" (UID: \"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f\") " pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.873490 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5596c56b4b-qhdhp"] Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.874375 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.878097 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.878799 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.880168 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.880357 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.880497 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.885748 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.893995 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 05:41:41 crc kubenswrapper[4661]: I1001 05:41:41.919917 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5596c56b4b-qhdhp"] Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.000896 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9dj9\" (UniqueName: 
\"kubernetes.io/projected/1cd31956-724c-48b8-bbfc-82111efcf566-kube-api-access-m9dj9\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.000949 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-proxy-ca-bundles\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.001102 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-client-ca\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.001144 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-config\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.001160 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cd31956-724c-48b8-bbfc-82111efcf566-serving-cert\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.102518 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-client-ca\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.102553 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-config\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.102569 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cd31956-724c-48b8-bbfc-82111efcf566-serving-cert\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.102605 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9dj9\" (UniqueName: \"kubernetes.io/projected/1cd31956-724c-48b8-bbfc-82111efcf566-kube-api-access-m9dj9\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: 
\"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.102644 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-proxy-ca-bundles\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.103791 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-proxy-ca-bundles\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.104178 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-client-ca\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.105253 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cd31956-724c-48b8-bbfc-82111efcf566-config\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.107866 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cd31956-724c-48b8-bbfc-82111efcf566-serving-cert\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.123890 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.134532 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9dj9\" (UniqueName: \"kubernetes.io/projected/1cd31956-724c-48b8-bbfc-82111efcf566-kube-api-access-m9dj9\") pod \"controller-manager-5596c56b4b-qhdhp\" (UID: \"1cd31956-724c-48b8-bbfc-82111efcf566\") " pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.196143 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.352233 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"]
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.657487 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5596c56b4b-qhdhp"]
Oct 01 05:41:42 crc kubenswrapper[4661]: W1001 05:41:42.665878 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cd31956_724c_48b8_bbfc_82111efcf566.slice/crio-ff24eb871734ca3f1feede2a1d539fd75d56569457891e8dbec209bb799ffc6c WatchSource:0}: Error finding container ff24eb871734ca3f1feede2a1d539fd75d56569457891e8dbec209bb799ffc6c: Status 404 returned error can't find the container with id ff24eb871734ca3f1feede2a1d539fd75d56569457891e8dbec209bb799ffc6c
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.811663 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" event={"ID":"1cd31956-724c-48b8-bbfc-82111efcf566","Type":"ContainerStarted","Data":"a99ac7de0223a88cc9b33ceba3db9d86f084d31ce8247fa4a4133b25f12e2356"}
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.811707 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" event={"ID":"1cd31956-724c-48b8-bbfc-82111efcf566","Type":"ContainerStarted","Data":"ff24eb871734ca3f1feede2a1d539fd75d56569457891e8dbec209bb799ffc6c"}
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.812595 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp"
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.813798 4661 patch_prober.go:28] interesting pod/controller-manager-5596c56b4b-qhdhp container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body=
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.813840 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" podUID="1cd31956-724c-48b8-bbfc-82111efcf566" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused"
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.814735 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" event={"ID":"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f","Type":"ContainerStarted","Data":"a77c0deeba2af3901e56abca57963b6ba1172f7aa8bdb578064fb1e1b72c0cc1"}
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.814782 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" event={"ID":"f5fef5ae-f2b8-4e4d-a074-f0de5ee0f19f","Type":"ContainerStarted","Data":"be4022fbb910e4629d99661147168955ffccfcd585fa7141694fc9d016e7c859"}
Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.815138 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk"
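[editor's note] The readiness failure above is the normal first-probe race: the kubelet GETs https://10.217.0.65:8443/healthz before the server is listening, gets connection refused, and keeps the pod unready until a later probe succeeds (the "ready" transition follows one second later). A bare-bones HTTP probe sketch; the timeout value is illustrative, standing in for the probe's timeoutSeconds:

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probeHTTP performs one check the way an httpGet probe behaves: any
// transport error or non-2xx/3xx status counts as a failure.
func probeHTTP(url string) (string, error) {
	client := &http.Client{
		Timeout: 1 * time.Second, // illustrative
		Transport: &http.Transport{
			// kubelet httpGet probes do not verify the pod's serving cert
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return "failure", err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return "success", nil
	}
	return "failure", fmt.Errorf("status %d", resp.StatusCode)
}

func main() {
	result, err := probeHTTP("https://10.217.0.65:8443/healthz")
	fmt.Println(result, err) // before the listener is up: connection refused
}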
pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.824359 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.847165 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" podStartSLOduration=2.84712612 podStartE2EDuration="2.84712612s" podCreationTimestamp="2025-10-01 05:41:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:41:42.836880952 +0000 UTC m=+751.774859566" watchObservedRunningTime="2025-10-01 05:41:42.84712612 +0000 UTC m=+751.785104774" Oct 01 05:41:42 crc kubenswrapper[4661]: I1001 05:41:42.868198 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d5c6f696b-p9cvk" podStartSLOduration=1.868174392 podStartE2EDuration="1.868174392s" podCreationTimestamp="2025-10-01 05:41:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:41:42.863389897 +0000 UTC m=+751.801368581" watchObservedRunningTime="2025-10-01 05:41:42.868174392 +0000 UTC m=+751.806153016" Oct 01 05:41:43 crc kubenswrapper[4661]: I1001 05:41:43.825680 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5596c56b4b-qhdhp" Oct 01 05:41:46 crc kubenswrapper[4661]: I1001 05:41:46.105406 4661 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.265382 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"] Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.266317 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.268226 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.268857 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.269101 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.269610 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-h6wpq"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.269815 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.291656 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"]
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.378222 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27p2n\" (UniqueName: \"kubernetes.io/projected/db1e2c00-b138-4835-a53b-4cb169f585eb-kube-api-access-27p2n\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.378314 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-apiservice-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.378367 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-webhook-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.479287 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-webhook-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.479354 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27p2n\" (UniqueName: \"kubernetes.io/projected/db1e2c00-b138-4835-a53b-4cb169f585eb-kube-api-access-27p2n\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.479392 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-apiservice-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.485066 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-webhook-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.485949 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/db1e2c00-b138-4835-a53b-4cb169f585eb-apiservice-cert\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.499780 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27p2n\" (UniqueName: \"kubernetes.io/projected/db1e2c00-b138-4835-a53b-4cb169f585eb-kube-api-access-27p2n\") pod \"metallb-operator-controller-manager-5648555f8f-nqmlz\" (UID: \"db1e2c00-b138-4835-a53b-4cb169f585eb\") " pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.583101 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.645805 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"]
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.646726 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.650048 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.650121 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-sw642"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.651518 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.673250 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"]
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.786994 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-webhook-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.787049 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqg9d\" (UniqueName: \"kubernetes.io/projected/fda1aa7d-8361-476a-b52c-db60416d47c5-kube-api-access-nqg9d\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.787102 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-apiservice-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.888558 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-webhook-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.888604 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqg9d\" (UniqueName: \"kubernetes.io/projected/fda1aa7d-8361-476a-b52c-db60416d47c5-kube-api-access-nqg9d\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.888653 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-apiservice-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.892666 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-apiservice-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.902289 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fda1aa7d-8361-476a-b52c-db60416d47c5-webhook-cert\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.904910 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqg9d\" (UniqueName: \"kubernetes.io/projected/fda1aa7d-8361-476a-b52c-db60416d47c5-kube-api-access-nqg9d\") pod \"metallb-operator-webhook-server-68b4bd4bc7-stfmz\" (UID: \"fda1aa7d-8361-476a-b52c-db60416d47c5\") " pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:48 crc kubenswrapper[4661]: I1001 05:41:48.959614 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:41:49 crc kubenswrapper[4661]: I1001 05:41:49.030348 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz"]
Oct 01 05:41:49 crc kubenswrapper[4661]: W1001 05:41:49.039966 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb1e2c00_b138_4835_a53b_4cb169f585eb.slice/crio-4b9259d24091b302cf6fe12bc8ab58e700302e8cdb8036f2125cfe8d068c36f1 WatchSource:0}: Error finding container 4b9259d24091b302cf6fe12bc8ab58e700302e8cdb8036f2125cfe8d068c36f1: Status 404 returned error can't find the container with id 4b9259d24091b302cf6fe12bc8ab58e700302e8cdb8036f2125cfe8d068c36f1
Oct 01 05:41:49 crc kubenswrapper[4661]: I1001 05:41:49.386526 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"]
Oct 01 05:41:49 crc kubenswrapper[4661]: W1001 05:41:49.389190 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfda1aa7d_8361_476a_b52c_db60416d47c5.slice/crio-312a28fcf5076a411655e238b1ac76f005ce775ec28f268380dddbcd5da0ec5c WatchSource:0}: Error finding container 312a28fcf5076a411655e238b1ac76f005ce775ec28f268380dddbcd5da0ec5c: Status 404 returned error can't find the container with id 312a28fcf5076a411655e238b1ac76f005ce775ec28f268380dddbcd5da0ec5c
Oct 01 05:41:49 crc kubenswrapper[4661]: I1001 05:41:49.860320 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz" event={"ID":"db1e2c00-b138-4835-a53b-4cb169f585eb","Type":"ContainerStarted","Data":"4b9259d24091b302cf6fe12bc8ab58e700302e8cdb8036f2125cfe8d068c36f1"}
Oct 01 05:41:49 crc kubenswrapper[4661]: I1001 05:41:49.861759 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz" event={"ID":"fda1aa7d-8361-476a-b52c-db60416d47c5","Type":"ContainerStarted","Data":"312a28fcf5076a411655e238b1ac76f005ce775ec28f268380dddbcd5da0ec5c"}
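[editor's note] The two W-level "Failed to process watch event ... 404" messages are a startup race in cAdvisor's cgroup watcher: an inotify event for the new crio-<id> cgroup arrives before the runtime reports the container, so the event is dropped at warning level; the ContainerStarted entries that follow show the container is picked up normally afterwards. Shape of the tolerant handler, with hypothetical types:

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("status 404: can't find the container")

// processWatchEvent looks up a container named by a cgroup event. A 404 is
// logged and dropped; the container is found again on the next
// housekeeping pass once the runtime actually knows it.
func processWatchEvent(cgroup string, lookup func(string) error) {
	if err := lookup(cgroup); errors.Is(err, errNotFound) {
		fmt.Printf("W Failed to process watch event %s: %v\n", cgroup, err)
		return // non-fatal: retried periodically
	} else if err != nil {
		panic(err) // anything else is a real failure
	}
	fmt.Println("container tracked:", cgroup)
}

func main() {
	notKnownYet := func(string) error { return errNotFound }
	processWatchEvent("crio-4b9259d24091b302", notKnownYet)
}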
event={"ID":"fda1aa7d-8361-476a-b52c-db60416d47c5","Type":"ContainerStarted","Data":"312a28fcf5076a411655e238b1ac76f005ce775ec28f268380dddbcd5da0ec5c"} Oct 01 05:41:52 crc kubenswrapper[4661]: I1001 05:41:52.890087 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz" event={"ID":"db1e2c00-b138-4835-a53b-4cb169f585eb","Type":"ContainerStarted","Data":"492f2872896f4bde2b5563993df766af6925bc7acea6080f74c334b74854562e"} Oct 01 05:41:52 crc kubenswrapper[4661]: I1001 05:41:52.891794 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz" Oct 01 05:41:52 crc kubenswrapper[4661]: I1001 05:41:52.918744 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz" podStartSLOduration=1.834966181 podStartE2EDuration="4.918730252s" podCreationTimestamp="2025-10-01 05:41:48 +0000 UTC" firstStartedPulling="2025-10-01 05:41:49.042685788 +0000 UTC m=+757.980664402" lastFinishedPulling="2025-10-01 05:41:52.126449859 +0000 UTC m=+761.064428473" observedRunningTime="2025-10-01 05:41:52.918562928 +0000 UTC m=+761.856541542" watchObservedRunningTime="2025-10-01 05:41:52.918730252 +0000 UTC m=+761.856708866" Oct 01 05:41:54 crc kubenswrapper[4661]: I1001 05:41:54.912667 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz" event={"ID":"fda1aa7d-8361-476a-b52c-db60416d47c5","Type":"ContainerStarted","Data":"533e7c742feb983b2d57718ed95abb4806c9d210c258df15a5aa92223268ba79"} Oct 01 05:41:54 crc kubenswrapper[4661]: I1001 05:41:54.943100 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz" podStartSLOduration=2.2665393 podStartE2EDuration="6.943072921s" podCreationTimestamp="2025-10-01 05:41:48 +0000 UTC" firstStartedPulling="2025-10-01 05:41:49.391383747 +0000 UTC m=+758.329362351" lastFinishedPulling="2025-10-01 05:41:54.067917338 +0000 UTC m=+763.005895972" observedRunningTime="2025-10-01 05:41:54.938616025 +0000 UTC m=+763.876594659" watchObservedRunningTime="2025-10-01 05:41:54.943072921 +0000 UTC m=+763.881051575" Oct 01 05:41:55 crc kubenswrapper[4661]: I1001 05:41:55.919486 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.151189 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"] Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.157327 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.177418 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"] Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.269871 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.270170 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.270286 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22vfd\" (UniqueName: \"kubernetes.io/projected/b956e15e-8d89-44b0-8409-939eca89c959-kube-api-access-22vfd\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.371531 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22vfd\" (UniqueName: \"kubernetes.io/projected/b956e15e-8d89-44b0-8409-939eca89c959-kube-api-access-22vfd\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.371662 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.371719 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.372263 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.372323 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities\") pod \"redhat-marketplace-mh4cb\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.390321 4661 operation_generator.go:637] "MountVolume.SetUp 
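[editor's note] Unlike the secret/configmap volumes earlier, utilities and catalog-content are emptyDir volumes: SetUp is little more than creating a per-pod directory that lives and dies with the pod, which is why the same names reappear in the teardown further down. Roughly:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// setUpEmptyDir creates the backing directory for an emptyDir volume under
// the pod's volumes dir, e.g.
// .../pods/<uid>/volumes/kubernetes.io~empty-dir/<name>.
func setUpEmptyDir(kubeletRoot, podUID, name string) (string, error) {
	dir := filepath.Join(kubeletRoot, "pods", podUID,
		"volumes", "kubernetes.io~empty-dir", name)
	if err := os.MkdirAll(dir, 0o750); err != nil { // mode is illustrative
		return "", err
	}
	return dir, nil
}

func main() {
	// Demo against a temp dir rather than /var/lib/kubelet.
	dir, err := setUpEmptyDir(os.TempDir(), "b956e15e-8d89-44b0-8409-939eca89c959", "catalog-content")
	fmt.Println(dir, err)
}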
Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.485451 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mh4cb"
Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.896165 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"]
Oct 01 05:42:02 crc kubenswrapper[4661]: I1001 05:42:02.993862 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerStarted","Data":"95f7c4de2ae4ecf229d7249e81cc3f30fa953d63202e1698a9990804d3775c87"}
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.001653 4661 generic.go:334] "Generic (PLEG): container finished" podID="b956e15e-8d89-44b0-8409-939eca89c959" containerID="bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71" exitCode=0
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.001698 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerDied","Data":"bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71"}
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.309206 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.309541 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.309592 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.310214 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 05:42:04 crc kubenswrapper[4661]: I1001 05:42:04.310290 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7" gracePeriod=600
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.008806 4661 generic.go:334] "Generic (PLEG): container finished" podID="b956e15e-8d89-44b0-8409-939eca89c959" containerID="bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16" exitCode=0
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.008921 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerDied","Data":"bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16"}
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.011512 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7" exitCode=0
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.011556 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7"}
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.011600 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e"}
Oct 01 05:42:05 crc kubenswrapper[4661]: I1001 05:42:05.011619 4661 scope.go:117] "RemoveContainer" containerID="7f40270c29bb2820cb8045b305f913484b609a17b862f508419b53c27f4a7e01"
Oct 01 05:42:06 crc kubenswrapper[4661]: I1001 05:42:06.021612 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerStarted","Data":"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7"}
Oct 01 05:42:06 crc kubenswrapper[4661]: I1001 05:42:06.041259 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mh4cb" podStartSLOduration=2.641708791 podStartE2EDuration="4.041239492s" podCreationTimestamp="2025-10-01 05:42:02 +0000 UTC" firstStartedPulling="2025-10-01 05:42:04.003144256 +0000 UTC m=+772.941122870" lastFinishedPulling="2025-10-01 05:42:05.402674947 +0000 UTC m=+774.340653571" observedRunningTime="2025-10-01 05:42:06.037833165 +0000 UTC m=+774.975811779" watchObservedRunningTime="2025-10-01 05:42:06.041239492 +0000 UTC m=+774.979218106"
Oct 01 05:42:08 crc kubenswrapper[4661]: I1001 05:42:08.967122 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-68b4bd4bc7-stfmz"
Oct 01 05:42:12 crc kubenswrapper[4661]: I1001 05:42:12.486582 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mh4cb"
Oct 01 05:42:12 crc kubenswrapper[4661]: I1001 05:42:12.486997 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mh4cb"
Oct 01 05:42:12 crc kubenswrapper[4661]: I1001 05:42:12.553353 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mh4cb"
Oct 01 05:42:13 crc kubenswrapper[4661]: I1001 05:42:13.154664 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mh4cb"
Oct 01 05:42:14 crc kubenswrapper[4661]: I1001 05:42:14.951818 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"]
pods=["openshift-marketplace/redhat-marketplace-mh4cb"] Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.098256 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mh4cb" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="registry-server" containerID="cri-o://1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7" gracePeriod=2 Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.588600 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.769807 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities\") pod \"b956e15e-8d89-44b0-8409-939eca89c959\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.769930 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22vfd\" (UniqueName: \"kubernetes.io/projected/b956e15e-8d89-44b0-8409-939eca89c959-kube-api-access-22vfd\") pod \"b956e15e-8d89-44b0-8409-939eca89c959\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.769992 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content\") pod \"b956e15e-8d89-44b0-8409-939eca89c959\" (UID: \"b956e15e-8d89-44b0-8409-939eca89c959\") " Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.771569 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities" (OuterVolumeSpecName: "utilities") pod "b956e15e-8d89-44b0-8409-939eca89c959" (UID: "b956e15e-8d89-44b0-8409-939eca89c959"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.778948 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b956e15e-8d89-44b0-8409-939eca89c959-kube-api-access-22vfd" (OuterVolumeSpecName: "kube-api-access-22vfd") pod "b956e15e-8d89-44b0-8409-939eca89c959" (UID: "b956e15e-8d89-44b0-8409-939eca89c959"). InnerVolumeSpecName "kube-api-access-22vfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.785591 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b956e15e-8d89-44b0-8409-939eca89c959" (UID: "b956e15e-8d89-44b0-8409-939eca89c959"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.871583 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22vfd\" (UniqueName: \"kubernetes.io/projected/b956e15e-8d89-44b0-8409-939eca89c959-kube-api-access-22vfd\") on node \"crc\" DevicePath \"\"" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.871665 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:42:15 crc kubenswrapper[4661]: I1001 05:42:15.871687 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b956e15e-8d89-44b0-8409-939eca89c959-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.109483 4661 generic.go:334] "Generic (PLEG): container finished" podID="b956e15e-8d89-44b0-8409-939eca89c959" containerID="1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7" exitCode=0 Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.109548 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerDied","Data":"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7"} Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.109589 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mh4cb" event={"ID":"b956e15e-8d89-44b0-8409-939eca89c959","Type":"ContainerDied","Data":"95f7c4de2ae4ecf229d7249e81cc3f30fa953d63202e1698a9990804d3775c87"} Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.109619 4661 scope.go:117] "RemoveContainer" containerID="1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.109839 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mh4cb" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.135691 4661 scope.go:117] "RemoveContainer" containerID="bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.153430 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"] Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.159366 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mh4cb"] Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.177601 4661 scope.go:117] "RemoveContainer" containerID="bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.203114 4661 scope.go:117] "RemoveContainer" containerID="1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7" Oct 01 05:42:16 crc kubenswrapper[4661]: E1001 05:42:16.203673 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7\": container with ID starting with 1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7 not found: ID does not exist" containerID="1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.203741 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7"} err="failed to get container status \"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7\": rpc error: code = NotFound desc = could not find container \"1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7\": container with ID starting with 1aea65651389c87be89dfa376b748dfc9688636ec356d8c220bc0b34128088f7 not found: ID does not exist" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.203796 4661 scope.go:117] "RemoveContainer" containerID="bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16" Oct 01 05:42:16 crc kubenswrapper[4661]: E1001 05:42:16.204260 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16\": container with ID starting with bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16 not found: ID does not exist" containerID="bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.204305 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16"} err="failed to get container status \"bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16\": rpc error: code = NotFound desc = could not find container \"bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16\": container with ID starting with bb6a6a2593d92b142aaa17b48f833185c97c06bbdf587492fc9b61f2b995bf16 not found: ID does not exist" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.204335 4661 scope.go:117] "RemoveContainer" containerID="bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71" Oct 01 05:42:16 crc kubenswrapper[4661]: E1001 05:42:16.204884 4661 log.go:32] "ContainerStatus from runtime service 
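
The paired log.go:32 / pod_container_deletor.go:53 entries above show the second RemoveContainer pass asking CRI-O for the status of containers it has already deleted: the runtime answers with gRPC NotFound, and the kubelet logs the error and carries on, since "already gone" is the desired end state. A rough dependency-free Go sketch of that idempotent cleanup; the sentinel error and helper are hypothetical, not the CRI client's API:

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the gRPC NotFound seen above; a hypothetical
// sentinel, not the real CRI error type.
var errNotFound = errors.New("code = NotFound")

// removeContainer tolerates an already-deleted container: the NotFound is
// logged and then swallowed, because the goal state is "container gone".
func removeContainer(getStatus func(id string) error, id string) error {
	if err := getStatus(id); errors.Is(err, errNotFound) {
		fmt.Printf("DeleteContainer returned error containerID=%q err=%v\n", id, err)
		return nil // nothing left to delete
	} else if err != nil {
		return err
	}
	// ... a real implementation would call the runtime's RemoveContainer here ...
	return nil
}

func main() {
	gone := func(id string) error {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	fmt.Println(removeContainer(gone, "1aea65651389c87b")) // <nil>
}
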
failed" err="rpc error: code = NotFound desc = could not find container \"bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71\": container with ID starting with bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71 not found: ID does not exist" containerID="bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71" Oct 01 05:42:16 crc kubenswrapper[4661]: I1001 05:42:16.204922 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71"} err="failed to get container status \"bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71\": rpc error: code = NotFound desc = could not find container \"bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71\": container with ID starting with bb888d1ed971a0cee9f277cca82d580a15257b53e5bd6cada7a09c0a41bbdc71 not found: ID does not exist" Oct 01 05:42:17 crc kubenswrapper[4661]: I1001 05:42:17.770628 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b956e15e-8d89-44b0-8409-939eca89c959" path="/var/lib/kubelet/pods/b956e15e-8d89-44b0-8409-939eca89c959/volumes" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.764732 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ndk2z"] Oct 01 05:42:24 crc kubenswrapper[4661]: E1001 05:42:24.765595 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="extract-content" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.765626 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="extract-content" Oct 01 05:42:24 crc kubenswrapper[4661]: E1001 05:42:24.765692 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="extract-utilities" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.765708 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="extract-utilities" Oct 01 05:42:24 crc kubenswrapper[4661]: E1001 05:42:24.765735 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="registry-server" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.765752 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="registry-server" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.766038 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b956e15e-8d89-44b0-8409-939eca89c959" containerName="registry-server" Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.768268 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.815979 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ndk2z"]
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.896568 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.896720 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.896951 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.998112 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.998210 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.998273 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.998813 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:24 crc kubenswrapper[4661]: I1001 05:42:24.999228 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:25 crc kubenswrapper[4661]: I1001 05:42:25.034979 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z"
"MountVolume.SetUp succeeded for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") pod \"community-operators-ndk2z\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") " pod="openshift-marketplace/community-operators-ndk2z" Oct 01 05:42:25 crc kubenswrapper[4661]: I1001 05:42:25.139247 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ndk2z" Oct 01 05:42:25 crc kubenswrapper[4661]: I1001 05:42:25.600580 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ndk2z"] Oct 01 05:42:26 crc kubenswrapper[4661]: I1001 05:42:26.184879 4661 generic.go:334] "Generic (PLEG): container finished" podID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerID="a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b" exitCode=0 Oct 01 05:42:26 crc kubenswrapper[4661]: I1001 05:42:26.184950 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerDied","Data":"a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b"} Oct 01 05:42:26 crc kubenswrapper[4661]: I1001 05:42:26.185131 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerStarted","Data":"5f27c7a6a5c79f30c4fd66e9d5f43a1d40686785cbc23808098badd869eeaa28"} Oct 01 05:42:27 crc kubenswrapper[4661]: I1001 05:42:27.194503 4661 generic.go:334] "Generic (PLEG): container finished" podID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerID="c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74" exitCode=0 Oct 01 05:42:27 crc kubenswrapper[4661]: I1001 05:42:27.194554 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerDied","Data":"c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74"} Oct 01 05:42:28 crc kubenswrapper[4661]: I1001 05:42:28.205123 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerStarted","Data":"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"} Oct 01 05:42:28 crc kubenswrapper[4661]: I1001 05:42:28.237552 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ndk2z" podStartSLOduration=2.744723451 podStartE2EDuration="4.237520972s" podCreationTimestamp="2025-10-01 05:42:24 +0000 UTC" firstStartedPulling="2025-10-01 05:42:26.186684409 +0000 UTC m=+795.124663033" lastFinishedPulling="2025-10-01 05:42:27.67948191 +0000 UTC m=+796.617460554" observedRunningTime="2025-10-01 05:42:28.232394597 +0000 UTC m=+797.170373211" watchObservedRunningTime="2025-10-01 05:42:28.237520972 +0000 UTC m=+797.175499626" Oct 01 05:42:28 crc kubenswrapper[4661]: I1001 05:42:28.587973 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5648555f8f-nqmlz" Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.544595 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"] Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.545557 
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.547723 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-xstxj"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.548512 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.553563 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-jk5qx"]
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.555670 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.561216 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"]
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.561340 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.562681 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.619046 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-x4czl"]
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.619883 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.622171 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.622186 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.622186 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.622430 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hqr72"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.642593 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-xfbgw"]
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.643431 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.645747 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661079 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-conf\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661143 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661166 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-startup\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661221 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics-certs\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4tsp\" (UniqueName: \"kubernetes.io/projected/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-kube-api-access-j4tsp\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661319 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-reloader\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661365 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7mcc\" (UniqueName: \"kubernetes.io/projected/6872061b-610d-47c4-bcea-d3a9b9e507f4-kube-api-access-t7mcc\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661387 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.661433 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-sockets\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.663744 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-xfbgw"]
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.762952 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-startup\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763241 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr99w\" (UniqueName: \"kubernetes.io/projected/27288f70-f17e-4362-b115-c0c69e26aa91-kube-api-access-xr99w\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763372 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics-certs\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763561 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4tsp\" (UniqueName: \"kubernetes.io/projected/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-kube-api-access-j4tsp\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763774 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlhnc\" (UniqueName: \"kubernetes.io/projected/41216ce7-4a7f-43c7-995e-081c9849f9bf-kube-api-access-tlhnc\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763906 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-reloader\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.763999 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-metrics-certs\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764109 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764230 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7mcc\" (UniqueName: \"kubernetes.io/projected/6872061b-610d-47c4-bcea-d3a9b9e507f4-kube-api-access-t7mcc\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764351 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764489 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.764611 4661 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.764692 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert podName:41e4d93f-6473-4aff-a0b0-e76588bdf2a6 nodeName:}" failed. No retries permitted until 2025-10-01 05:42:30.264674138 +0000 UTC m=+799.202652752 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert") pod "frr-k8s-webhook-server-5478bdb765-zp8cf" (UID: "41e4d93f-6473-4aff-a0b0-e76588bdf2a6") : secret "frr-k8s-webhook-server-cert" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764426 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-reloader\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.764624 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/41216ce7-4a7f-43c7-995e-081c9849f9bf-metallb-excludel2\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765009 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-sockets\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765159 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-cert\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765293 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-conf\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765433 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765504 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-conf\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765250 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-sockets\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765703 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6872061b-610d-47c4-bcea-d3a9b9e507f4-frr-startup\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.765894 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.788257 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6872061b-610d-47c4-bcea-d3a9b9e507f4-metrics-certs\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.792034 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4tsp\" (UniqueName: \"kubernetes.io/projected/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-kube-api-access-j4tsp\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.794856 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7mcc\" (UniqueName: \"kubernetes.io/projected/6872061b-610d-47c4-bcea-d3a9b9e507f4-kube-api-access-t7mcc\") pod \"frr-k8s-jk5qx\" (UID: \"6872061b-610d-47c4-bcea-d3a9b9e507f4\") " pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867296 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867646 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlhnc\" (UniqueName: \"kubernetes.io/projected/41216ce7-4a7f-43c7-995e-081c9849f9bf-kube-api-access-tlhnc\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867705 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-metrics-certs\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867730 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867772 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867802 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/41216ce7-4a7f-43c7-995e-081c9849f9bf-metallb-excludel2\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867828 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-cert\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.867859 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr99w\" (UniqueName: \"kubernetes.io/projected/27288f70-f17e-4362-b115-c0c69e26aa91-kube-api-access-xr99w\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.867957 4661 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.868009 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist podName:41216ce7-4a7f-43c7-995e-081c9849f9bf nodeName:}" failed. No retries permitted until 2025-10-01 05:42:30.367992641 +0000 UTC m=+799.305971255 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist") pod "speaker-x4czl" (UID: "41216ce7-4a7f-43c7-995e-081c9849f9bf") : secret "metallb-memberlist" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.868163 4661 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: E1001 05:42:29.868210 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs podName:41216ce7-4a7f-43c7-995e-081c9849f9bf nodeName:}" failed. No retries permitted until 2025-10-01 05:42:30.368193767 +0000 UTC m=+799.306172501 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs") pod "speaker-x4czl" (UID: "41216ce7-4a7f-43c7-995e-081c9849f9bf") : secret "speaker-certs-secret" not found
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.868891 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/41216ce7-4a7f-43c7-995e-081c9849f9bf-metallb-excludel2\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.871748 4661 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.872072 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-metrics-certs\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.882027 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/27288f70-f17e-4362-b115-c0c69e26aa91-cert\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.887336 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr99w\" (UniqueName: \"kubernetes.io/projected/27288f70-f17e-4362-b115-c0c69e26aa91-kube-api-access-xr99w\") pod \"controller-5d688f5ffc-xfbgw\" (UID: \"27288f70-f17e-4362-b115-c0c69e26aa91\") " pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.887481 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlhnc\" (UniqueName: \"kubernetes.io/projected/41216ce7-4a7f-43c7-995e-081c9849f9bf-kube-api-access-tlhnc\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:29 crc kubenswrapper[4661]: I1001 05:42:29.955429 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-xfbgw"
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.223703 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"863c9372d5eb144a271f4876b55cbfce000f1a379230f1e57ac24687de2b94ff"}
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.273118 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.281544 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/41e4d93f-6473-4aff-a0b0-e76588bdf2a6-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zp8cf\" (UID: \"41e4d93f-6473-4aff-a0b0-e76588bdf2a6\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.374784 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.375036 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:30 crc kubenswrapper[4661]: E1001 05:42:30.375226 4661 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 01 05:42:30 crc kubenswrapper[4661]: E1001 05:42:30.375299 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist podName:41216ce7-4a7f-43c7-995e-081c9849f9bf nodeName:}" failed. No retries permitted until 2025-10-01 05:42:31.375274217 +0000 UTC m=+800.313252871 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist") pod "speaker-x4czl" (UID: "41216ce7-4a7f-43c7-995e-081c9849f9bf") : secret "metallb-memberlist" not found
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.377685 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-xfbgw"]
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.381779 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-metrics-certs\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:30 crc kubenswrapper[4661]: W1001 05:42:30.386547 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27288f70_f17e_4362_b115_c0c69e26aa91.slice/crio-e45cffa3e1e5d3693d6e22b2454b696e1681c6e4e839e131f08d5a0a68b304ab WatchSource:0}: Error finding container e45cffa3e1e5d3693d6e22b2454b696e1681c6e4e839e131f08d5a0a68b304ab: Status 404 returned error can't find the container with id e45cffa3e1e5d3693d6e22b2454b696e1681c6e4e839e131f08d5a0a68b304ab
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.458490 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:30 crc kubenswrapper[4661]: I1001 05:42:30.951204 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"]
Oct 01 05:42:30 crc kubenswrapper[4661]: W1001 05:42:30.959115 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41e4d93f_6473_4aff_a0b0_e76588bdf2a6.slice/crio-283f2f059d6921f8edd788dfd5c530a95de97e6c90df83747ade7f09c3a76ccd WatchSource:0}: Error finding container 283f2f059d6921f8edd788dfd5c530a95de97e6c90df83747ade7f09c3a76ccd: Status 404 returned error can't find the container with id 283f2f059d6921f8edd788dfd5c530a95de97e6c90df83747ade7f09c3a76ccd
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.230185 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf" event={"ID":"41e4d93f-6473-4aff-a0b0-e76588bdf2a6","Type":"ContainerStarted","Data":"283f2f059d6921f8edd788dfd5c530a95de97e6c90df83747ade7f09c3a76ccd"}
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.231886 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xfbgw" event={"ID":"27288f70-f17e-4362-b115-c0c69e26aa91","Type":"ContainerStarted","Data":"6329cdad3d7f94464fe5b67d24c1329a785c2f03496554ab54e4ec55ab0d5263"}
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.231908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xfbgw" event={"ID":"27288f70-f17e-4362-b115-c0c69e26aa91","Type":"ContainerStarted","Data":"d887811c9448e280669c371e1bd3f2ff4f010bb68b11462e43dce6441bee62d2"}
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.231925 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xfbgw" event={"ID":"27288f70-f17e-4362-b115-c0c69e26aa91","Type":"ContainerStarted","Data":"e45cffa3e1e5d3693d6e22b2454b696e1681c6e4e839e131f08d5a0a68b304ab"}
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.232040 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-xfbgw"
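
The memberlist mount above fails at 05:42:29 with durationBeforeRetry 500ms and again at 05:42:30 with durationBeforeRetry 1s: per-operation exponential backoff that doubles on each consecutive failure. A minimal Go sketch of that doubling; the 500ms start matches the log, while the cap is an assumed placeholder rather than a value taken from the log:

package main

import (
	"fmt"
	"time"
)

type backoff struct {
	delay, max time.Duration
}

// next doubles the retry delay on each consecutive failure, starting
// from the 500ms seen in the first nestedpendingoperations entry.
func (b *backoff) next() time.Duration {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond
	} else if b.delay *= 2; b.delay > b.max {
		b.delay = b.max
	}
	return b.delay
}

func main() {
	b := &backoff{max: 2 * time.Minute} // assumed cap, for illustration only
	for i := 0; i < 4; i++ {
		fmt.Printf("no retries permitted for %v\n", b.next()) // 500ms, 1s, 2s, 4s
	}
}
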
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.259001 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-xfbgw" podStartSLOduration=2.25898189 podStartE2EDuration="2.25898189s" podCreationTimestamp="2025-10-01 05:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:42:31.254867255 +0000 UTC m=+800.192845879" watchObservedRunningTime="2025-10-01 05:42:31.25898189 +0000 UTC m=+800.196960524"
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.394665 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.399461 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/41216ce7-4a7f-43c7-995e-081c9849f9bf-memberlist\") pod \"speaker-x4czl\" (UID: \"41216ce7-4a7f-43c7-995e-081c9849f9bf\") " pod="metallb-system/speaker-x4czl"
Oct 01 05:42:31 crc kubenswrapper[4661]: I1001 05:42:31.432147 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-x4czl"
Oct 01 05:42:31 crc kubenswrapper[4661]: W1001 05:42:31.458038 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41216ce7_4a7f_43c7_995e_081c9849f9bf.slice/crio-a7d53308b726620d3e473786e60743036133db4f1fe990a3a6f3b244e38b3c45 WatchSource:0}: Error finding container a7d53308b726620d3e473786e60743036133db4f1fe990a3a6f3b244e38b3c45: Status 404 returned error can't find the container with id a7d53308b726620d3e473786e60743036133db4f1fe990a3a6f3b244e38b3c45
Oct 01 05:42:32 crc kubenswrapper[4661]: I1001 05:42:32.242076 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-x4czl" event={"ID":"41216ce7-4a7f-43c7-995e-081c9849f9bf","Type":"ContainerStarted","Data":"a295e2e12f46cc505723a3be529d1ed37da6692fe08c93d60e01edb4cdedcd92"}
Oct 01 05:42:32 crc kubenswrapper[4661]: I1001 05:42:32.242468 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-x4czl" event={"ID":"41216ce7-4a7f-43c7-995e-081c9849f9bf","Type":"ContainerStarted","Data":"429680e670e44cd6bf341d2a79e42d5de8d4f9fbc4887bccd6426b632d9154bb"}
Oct 01 05:42:32 crc kubenswrapper[4661]: I1001 05:42:32.242485 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-x4czl" event={"ID":"41216ce7-4a7f-43c7-995e-081c9849f9bf","Type":"ContainerStarted","Data":"a7d53308b726620d3e473786e60743036133db4f1fe990a3a6f3b244e38b3c45"}
Oct 01 05:42:32 crc kubenswrapper[4661]: I1001 05:42:32.242689 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-x4czl"
Oct 01 05:42:32 crc kubenswrapper[4661]: I1001 05:42:32.264991 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-x4czl" podStartSLOduration=3.264971631 podStartE2EDuration="3.264971631s" podCreationTimestamp="2025-10-01 05:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:42:32.260366292 +0000 UTC m=+801.198344906" watchObservedRunningTime="2025-10-01 05:42:32.264971631 +0000 UTC m=+801.202950245"
Oct 01 05:42:35 crc kubenswrapper[4661]: I1001 05:42:35.139890 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:35 crc kubenswrapper[4661]: I1001 05:42:35.140293 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:35 crc kubenswrapper[4661]: I1001 05:42:35.192201 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:35 crc kubenswrapper[4661]: I1001 05:42:35.306203 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.274977 4661 generic.go:334] "Generic (PLEG): container finished" podID="6872061b-610d-47c4-bcea-d3a9b9e507f4" containerID="21a522290a70c53f3296a910ac437afa589af2e34c8a224f84abfb6d37de9684" exitCode=0
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.275092 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerDied","Data":"21a522290a70c53f3296a910ac437afa589af2e34c8a224f84abfb6d37de9684"}
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.278501 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf" event={"ID":"41e4d93f-6473-4aff-a0b0-e76588bdf2a6","Type":"ContainerStarted","Data":"90760e918edd341b0a9e94c8763314c81ef274a9da7873357205985664d3390f"}
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.278708 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf"
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.547822 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf" podStartSLOduration=2.793263325 podStartE2EDuration="8.547787486s" podCreationTimestamp="2025-10-01 05:42:29 +0000 UTC" firstStartedPulling="2025-10-01 05:42:30.960815962 +0000 UTC m=+799.898794576" lastFinishedPulling="2025-10-01 05:42:36.715340093 +0000 UTC m=+805.653318737" observedRunningTime="2025-10-01 05:42:37.327062094 +0000 UTC m=+806.265040748" watchObservedRunningTime="2025-10-01 05:42:37.547787486 +0000 UTC m=+806.485766190"
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.555420 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ndk2z"]
Oct 01 05:42:37 crc kubenswrapper[4661]: I1001 05:42:37.555783 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ndk2z" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="registry-server" containerID="cri-o://f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08" gracePeriod=2
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.074792 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ndk2z"
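
For community-operators-ndk2z above, the startup probe reports "unhealthy", then "started", and only afterwards does the readiness probe report "ready": readiness checking is effectively held until the startup probe has succeeded. A toy Go model of that gating (not the kubelet's prober, and the exact interleaving of probe results in the log may differ):

package main

import "fmt"

type pod struct{ started, ready bool }

// probe reports startup results until the startup probe succeeds once,
// and only then lets readiness results through, mirroring the sequence
// of SyncLoop (probe) entries above.
func (p *pod) probe(startupOK, readyOK bool) string {
	if !p.started {
		if !startupOK {
			return `probe="startup" status="unhealthy"`
		}
		p.started = true
		return `probe="startup" status="started"`
	}
	if readyOK {
		p.ready = true
		return `probe="readiness" status="ready"`
	}
	return `probe="readiness" status=""`
}

func main() {
	p := &pod{}
	fmt.Println(p.probe(false, false)) // startup unhealthy
	fmt.Println(p.probe(true, false))  // startup started
	fmt.Println(p.probe(true, true))   // readiness ready
}
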
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.211965 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities\") pod \"569b68e0-5d1e-4594-823f-01f47d8dea6f\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") "
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.212058 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") pod \"569b68e0-5d1e-4594-823f-01f47d8dea6f\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") "
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.212103 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content\") pod \"569b68e0-5d1e-4594-823f-01f47d8dea6f\" (UID: \"569b68e0-5d1e-4594-823f-01f47d8dea6f\") "
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.213093 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities" (OuterVolumeSpecName: "utilities") pod "569b68e0-5d1e-4594-823f-01f47d8dea6f" (UID: "569b68e0-5d1e-4594-823f-01f47d8dea6f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.225092 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w" (OuterVolumeSpecName: "kube-api-access-wfn2w") pod "569b68e0-5d1e-4594-823f-01f47d8dea6f" (UID: "569b68e0-5d1e-4594-823f-01f47d8dea6f"). InnerVolumeSpecName "kube-api-access-wfn2w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.288709 4661 generic.go:334] "Generic (PLEG): container finished" podID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerID="f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08" exitCode=0
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.288761 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ndk2z"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.288811 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerDied","Data":"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"}
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.288884 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ndk2z" event={"ID":"569b68e0-5d1e-4594-823f-01f47d8dea6f","Type":"ContainerDied","Data":"5f27c7a6a5c79f30c4fd66e9d5f43a1d40686785cbc23808098badd869eeaa28"}
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.288927 4661 scope.go:117] "RemoveContainer" containerID="f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.291440 4661 generic.go:334] "Generic (PLEG): container finished" podID="6872061b-610d-47c4-bcea-d3a9b9e507f4" containerID="05a46f4841ba119aa334fb5782f4950d26ba70fc23eb4d00897621e1a1e2e9be" exitCode=0
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.291720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerDied","Data":"05a46f4841ba119aa334fb5782f4950d26ba70fc23eb4d00897621e1a1e2e9be"}
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.313953 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.314299 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfn2w\" (UniqueName: \"kubernetes.io/projected/569b68e0-5d1e-4594-823f-01f47d8dea6f-kube-api-access-wfn2w\") on node \"crc\" DevicePath \"\""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.323542 4661 scope.go:117] "RemoveContainer" containerID="c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.359907 4661 scope.go:117] "RemoveContainer" containerID="a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.391358 4661 scope.go:117] "RemoveContainer" containerID="f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"
Oct 01 05:42:38 crc kubenswrapper[4661]: E1001 05:42:38.391900 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08\": container with ID starting with f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08 not found: ID does not exist" containerID="f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.391956 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08"} err="failed to get container status \"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08\": rpc error: code = NotFound desc = could not find container \"f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08\": container with ID starting with f7fa8f474355c72cbe492e7bf7d1d05abb16ed5aa0fe651099812ca8ff4a8f08 not found: ID does not exist"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.391991 4661 scope.go:117] "RemoveContainer" containerID="c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74"
Oct 01 05:42:38 crc kubenswrapper[4661]: E1001 05:42:38.392329 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74\": container with ID starting with c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74 not found: ID does not exist" containerID="c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.392373 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74"} err="failed to get container status \"c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74\": rpc error: code = NotFound desc = could not find container \"c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74\": container with ID starting with c28bc345bd8f2ebd7d777bf31e55c3c172884452b0747e350c6f907764a66f74 not found: ID does not exist"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.392401 4661 scope.go:117] "RemoveContainer" containerID="a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b"
Oct 01 05:42:38 crc kubenswrapper[4661]: E1001 05:42:38.392763 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b\": container with ID starting with a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b not found: ID does not exist" containerID="a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.392801 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b"} err="failed to get container status \"a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b\": rpc error: code = NotFound desc = could not find container \"a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b\": container with ID starting with a65e7c5945ea27362389f7a7ee28c9c2d91e11d6339e395ba2021790736ce76b not found: ID does not exist"
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.660674 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "569b68e0-5d1e-4594-823f-01f47d8dea6f" (UID: "569b68e0-5d1e-4594-823f-01f47d8dea6f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.719782 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/569b68e0-5d1e-4594-823f-01f47d8dea6f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.925563 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ndk2z"]
Oct 01 05:42:38 crc kubenswrapper[4661]: I1001 05:42:38.932765 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ndk2z"]
Oct 01 05:42:39 crc kubenswrapper[4661]: I1001 05:42:39.330090 4661 generic.go:334] "Generic (PLEG): container finished" podID="6872061b-610d-47c4-bcea-d3a9b9e507f4" containerID="7d8ad7130cd810205e540bc4680dc2757673ae0fe0c9d24db8f5f4811b5f23d6" exitCode=0
Oct 01 05:42:39 crc kubenswrapper[4661]: I1001 05:42:39.330141 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerDied","Data":"7d8ad7130cd810205e540bc4680dc2757673ae0fe0c9d24db8f5f4811b5f23d6"}
Oct 01 05:42:39 crc kubenswrapper[4661]: I1001 05:42:39.771531 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" path="/var/lib/kubelet/pods/569b68e0-5d1e-4594-823f-01f47d8dea6f/volumes"
Oct 01 05:42:40 crc kubenswrapper[4661]: I1001 05:42:40.342442 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"583503e8140e7a95d71ba32fd2eaf33ebf2d6290d15d8e2da374b5de1e4a3e25"}
Oct 01 05:42:40 crc kubenswrapper[4661]: I1001 05:42:40.342495 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"481f2e124784b0b63d6ed5cc4b0d9f227d095237224e10e7d37991c47ea04363"}
Oct 01 05:42:40 crc kubenswrapper[4661]: I1001 05:42:40.342505 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"9ab0636fc9dea21690c56d7a31c6859fb71fce534373a67e28fb4b30a340c8a3"}
Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.355601 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"a783afbd416cfce8ae2cec86778184fe49b6bf9d45aeb7024c60028bfc3a5408"}
Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.355945 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"f2c251eef3384f3fe3dc53e5e579c512ef30c6ddc1db895bb2cf3d6325b62e0b"}
Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.355974 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-jk5qx"
Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.355989 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jk5qx" event={"ID":"6872061b-610d-47c4-bcea-d3a9b9e507f4","Type":"ContainerStarted","Data":"6d58a67f1931652e68c882f06a36d6a0c70adbcb7565b8b53a8e4026653b9e30"}
Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.398574 4661
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-jk5qx" podStartSLOduration=5.707923342 podStartE2EDuration="12.39854848s" podCreationTimestamp="2025-10-01 05:42:29 +0000 UTC" firstStartedPulling="2025-10-01 05:42:30.008028216 +0000 UTC m=+798.946006850" lastFinishedPulling="2025-10-01 05:42:36.698653364 +0000 UTC m=+805.636631988" observedRunningTime="2025-10-01 05:42:41.39320518 +0000 UTC m=+810.331183824" watchObservedRunningTime="2025-10-01 05:42:41.39854848 +0000 UTC m=+810.336527144" Oct 01 05:42:41 crc kubenswrapper[4661]: I1001 05:42:41.439527 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-x4czl" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.484211 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:44 crc kubenswrapper[4661]: E1001 05:42:44.485250 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="extract-content" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.485273 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="extract-content" Oct 01 05:42:44 crc kubenswrapper[4661]: E1001 05:42:44.485306 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="registry-server" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.485316 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="registry-server" Oct 01 05:42:44 crc kubenswrapper[4661]: E1001 05:42:44.485333 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="extract-utilities" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.485422 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="extract-utilities" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.485593 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="569b68e0-5d1e-4594-823f-01f47d8dea6f" containerName="registry-server" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.486400 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.492866 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.493095 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-4rst4" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.497192 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.497250 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.601573 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5nng\" (UniqueName: \"kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng\") pod \"openstack-operator-index-6zrjf\" (UID: \"be3e7072-4e48-45dd-bd9f-3df3180ff8e1\") " pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.703036 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5nng\" (UniqueName: \"kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng\") pod \"openstack-operator-index-6zrjf\" (UID: \"be3e7072-4e48-45dd-bd9f-3df3180ff8e1\") " pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.733936 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5nng\" (UniqueName: \"kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng\") pod \"openstack-operator-index-6zrjf\" (UID: \"be3e7072-4e48-45dd-bd9f-3df3180ff8e1\") " pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.813556 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.867815 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-jk5qx" Oct 01 05:42:44 crc kubenswrapper[4661]: I1001 05:42:44.929037 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-jk5qx" Oct 01 05:42:45 crc kubenswrapper[4661]: I1001 05:42:45.259147 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:45 crc kubenswrapper[4661]: W1001 05:42:45.265333 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe3e7072_4e48_45dd_bd9f_3df3180ff8e1.slice/crio-2f497d150f96f122a3e53a796390b7a552a8162d4d995fecd75d8b222b93a90d WatchSource:0}: Error finding container 2f497d150f96f122a3e53a796390b7a552a8162d4d995fecd75d8b222b93a90d: Status 404 returned error can't find the container with id 2f497d150f96f122a3e53a796390b7a552a8162d4d995fecd75d8b222b93a90d Oct 01 05:42:45 crc kubenswrapper[4661]: I1001 05:42:45.381901 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zrjf" event={"ID":"be3e7072-4e48-45dd-bd9f-3df3180ff8e1","Type":"ContainerStarted","Data":"2f497d150f96f122a3e53a796390b7a552a8162d4d995fecd75d8b222b93a90d"} Oct 01 05:42:47 crc kubenswrapper[4661]: I1001 05:42:47.398321 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zrjf" event={"ID":"be3e7072-4e48-45dd-bd9f-3df3180ff8e1","Type":"ContainerStarted","Data":"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83"} Oct 01 05:42:47 crc kubenswrapper[4661]: I1001 05:42:47.421140 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6zrjf" podStartSLOduration=1.732681188 podStartE2EDuration="3.421117247s" podCreationTimestamp="2025-10-01 05:42:44 +0000 UTC" firstStartedPulling="2025-10-01 05:42:45.267728522 +0000 UTC m=+814.205707126" lastFinishedPulling="2025-10-01 05:42:46.956164521 +0000 UTC m=+815.894143185" observedRunningTime="2025-10-01 05:42:47.416715422 +0000 UTC m=+816.354694116" watchObservedRunningTime="2025-10-01 05:42:47.421117247 +0000 UTC m=+816.359095901" Oct 01 05:42:47 crc kubenswrapper[4661]: I1001 05:42:47.858847 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.468734 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cvddl"] Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.512893 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cvddl"] Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.513036 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.660911 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cqdd\" (UniqueName: \"kubernetes.io/projected/2dc07b8e-5f1a-46cc-a33d-ffc63239d05a-kube-api-access-4cqdd\") pod \"openstack-operator-index-cvddl\" (UID: \"2dc07b8e-5f1a-46cc-a33d-ffc63239d05a\") " pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.763118 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cqdd\" (UniqueName: \"kubernetes.io/projected/2dc07b8e-5f1a-46cc-a33d-ffc63239d05a-kube-api-access-4cqdd\") pod \"openstack-operator-index-cvddl\" (UID: \"2dc07b8e-5f1a-46cc-a33d-ffc63239d05a\") " pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.790982 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cqdd\" (UniqueName: \"kubernetes.io/projected/2dc07b8e-5f1a-46cc-a33d-ffc63239d05a-kube-api-access-4cqdd\") pod \"openstack-operator-index-cvddl\" (UID: \"2dc07b8e-5f1a-46cc-a33d-ffc63239d05a\") " pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:48 crc kubenswrapper[4661]: I1001 05:42:48.839331 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.346981 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cvddl"] Oct 01 05:42:49 crc kubenswrapper[4661]: W1001 05:42:49.353894 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dc07b8e_5f1a_46cc_a33d_ffc63239d05a.slice/crio-c91c933cdbe9896f5a4c143640bec4d65cce20dbcd94fe77f375f557e0c5a684 WatchSource:0}: Error finding container c91c933cdbe9896f5a4c143640bec4d65cce20dbcd94fe77f375f557e0c5a684: Status 404 returned error can't find the container with id c91c933cdbe9896f5a4c143640bec4d65cce20dbcd94fe77f375f557e0c5a684 Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.415914 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cvddl" event={"ID":"2dc07b8e-5f1a-46cc-a33d-ffc63239d05a","Type":"ContainerStarted","Data":"c91c933cdbe9896f5a4c143640bec4d65cce20dbcd94fe77f375f557e0c5a684"} Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.415989 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-6zrjf" podUID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" containerName="registry-server" containerID="cri-o://9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83" gracePeriod=2 Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.871373 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-jk5qx" Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.946300 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:49 crc kubenswrapper[4661]: I1001 05:42:49.960593 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-xfbgw" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.083487 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5nng\" (UniqueName: \"kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng\") pod \"be3e7072-4e48-45dd-bd9f-3df3180ff8e1\" (UID: \"be3e7072-4e48-45dd-bd9f-3df3180ff8e1\") " Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.089094 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng" (OuterVolumeSpecName: "kube-api-access-z5nng") pod "be3e7072-4e48-45dd-bd9f-3df3180ff8e1" (UID: "be3e7072-4e48-45dd-bd9f-3df3180ff8e1"). InnerVolumeSpecName "kube-api-access-z5nng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.185698 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5nng\" (UniqueName: \"kubernetes.io/projected/be3e7072-4e48-45dd-bd9f-3df3180ff8e1-kube-api-access-z5nng\") on node \"crc\" DevicePath \"\"" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.427755 4661 generic.go:334] "Generic (PLEG): container finished" podID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" containerID="9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83" exitCode=0 Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.427833 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zrjf" event={"ID":"be3e7072-4e48-45dd-bd9f-3df3180ff8e1","Type":"ContainerDied","Data":"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83"} Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.427861 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6zrjf" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.427902 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6zrjf" event={"ID":"be3e7072-4e48-45dd-bd9f-3df3180ff8e1","Type":"ContainerDied","Data":"2f497d150f96f122a3e53a796390b7a552a8162d4d995fecd75d8b222b93a90d"} Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.427932 4661 scope.go:117] "RemoveContainer" containerID="9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.430271 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cvddl" event={"ID":"2dc07b8e-5f1a-46cc-a33d-ffc63239d05a","Type":"ContainerStarted","Data":"953f032a0ce8ffbfe8c8b53ea9e0b1a94b5f8ebb461bb151f12ab88822cf8a48"} Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.464585 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cvddl" podStartSLOduration=1.937483842 podStartE2EDuration="2.464554073s" podCreationTimestamp="2025-10-01 05:42:48 +0000 UTC" firstStartedPulling="2025-10-01 05:42:49.35987559 +0000 UTC m=+818.297854244" lastFinishedPulling="2025-10-01 05:42:49.886945841 +0000 UTC m=+818.824924475" observedRunningTime="2025-10-01 05:42:50.45446958 +0000 UTC m=+819.392448194" watchObservedRunningTime="2025-10-01 05:42:50.464554073 +0000 UTC m=+819.402532717" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.465522 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zp8cf" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.470762 4661 scope.go:117] "RemoveContainer" containerID="9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83" Oct 01 05:42:50 crc kubenswrapper[4661]: E1001 05:42:50.471477 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83\": container with ID starting with 9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83 not found: ID does not exist" containerID="9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.471538 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83"} err="failed to get container status \"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83\": rpc error: code = NotFound desc = could not find container \"9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83\": container with ID starting with 9ea2ce1264bdf72f46785332effbdf4631d6ae8e5b3e0d24650e5269cc6bce83 not found: ID does not exist" Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.518299 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:50 crc kubenswrapper[4661]: I1001 05:42:50.522337 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-6zrjf"] Oct 01 05:42:51 crc kubenswrapper[4661]: I1001 05:42:51.771606 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" 
path="/var/lib/kubelet/pods/be3e7072-4e48-45dd-bd9f-3df3180ff8e1/volumes" Oct 01 05:42:58 crc kubenswrapper[4661]: I1001 05:42:58.840242 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:58 crc kubenswrapper[4661]: I1001 05:42:58.842424 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:58 crc kubenswrapper[4661]: I1001 05:42:58.888228 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:42:59 crc kubenswrapper[4661]: I1001 05:42:59.578706 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cvddl" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.071919 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:04 crc kubenswrapper[4661]: E1001 05:43:04.073442 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" containerName="registry-server" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.073477 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" containerName="registry-server" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.074215 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="be3e7072-4e48-45dd-bd9f-3df3180ff8e1" containerName="registry-server" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.082099 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.111852 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.204205 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.204290 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.204497 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmfk2\" (UniqueName: \"kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.306509 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities\") pod \"certified-operators-fcg2n\" (UID: 
\"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.306932 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmfk2\" (UniqueName: \"kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.306980 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.307350 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.307540 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.331298 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmfk2\" (UniqueName: \"kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2\") pod \"certified-operators-fcg2n\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.413544 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:04 crc kubenswrapper[4661]: I1001 05:43:04.843566 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:04 crc kubenswrapper[4661]: W1001 05:43:04.853598 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79065633_b245_4e74_9940_669c46fec506.slice/crio-9d198496e02a24720fc5c58c824c8b33abf5473d5c05431b33fdeef3227e6715 WatchSource:0}: Error finding container 9d198496e02a24720fc5c58c824c8b33abf5473d5c05431b33fdeef3227e6715: Status 404 returned error can't find the container with id 9d198496e02a24720fc5c58c824c8b33abf5473d5c05431b33fdeef3227e6715 Oct 01 05:43:05 crc kubenswrapper[4661]: I1001 05:43:05.594375 4661 generic.go:334] "Generic (PLEG): container finished" podID="79065633-b245-4e74-9940-669c46fec506" containerID="75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d" exitCode=0 Oct 01 05:43:05 crc kubenswrapper[4661]: I1001 05:43:05.594426 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerDied","Data":"75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d"} Oct 01 05:43:05 crc kubenswrapper[4661]: I1001 05:43:05.594457 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerStarted","Data":"9d198496e02a24720fc5c58c824c8b33abf5473d5c05431b33fdeef3227e6715"} Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.503880 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd"] Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.505239 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.508609 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-xcxl8" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.514528 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd"] Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.600990 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerStarted","Data":"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b"} Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.638079 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.638221 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jlng\" (UniqueName: \"kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.638264 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.739317 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jlng\" (UniqueName: \"kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.739389 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.739433 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: 
\"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.739962 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.739996 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.759395 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jlng\" (UniqueName: \"kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng\") pod \"edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:06 crc kubenswrapper[4661]: I1001 05:43:06.821310 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.236883 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd"] Oct 01 05:43:07 crc kubenswrapper[4661]: W1001 05:43:07.246781 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode97f5f22_3f8d_416c_a584_6d49ccb28f70.slice/crio-4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d WatchSource:0}: Error finding container 4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d: Status 404 returned error can't find the container with id 4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.610272 4661 generic.go:334] "Generic (PLEG): container finished" podID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerID="3232ef6c1d5da6bf0cd85665b3869af09a8d70fb4b5a876b64f97d0e7ad1a1e4" exitCode=0 Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.610373 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerDied","Data":"3232ef6c1d5da6bf0cd85665b3869af09a8d70fb4b5a876b64f97d0e7ad1a1e4"} Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.610410 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerStarted","Data":"4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d"} Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.613536 4661 generic.go:334] "Generic (PLEG): 
container finished" podID="79065633-b245-4e74-9940-669c46fec506" containerID="8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b" exitCode=0 Oct 01 05:43:07 crc kubenswrapper[4661]: I1001 05:43:07.613706 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerDied","Data":"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b"} Oct 01 05:43:08 crc kubenswrapper[4661]: I1001 05:43:08.626069 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerStarted","Data":"5965a829fde3dabb8486b039e3c92783a56716e62f02c21c6f1d736c36d16c95"} Oct 01 05:43:08 crc kubenswrapper[4661]: I1001 05:43:08.628204 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerStarted","Data":"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31"} Oct 01 05:43:08 crc kubenswrapper[4661]: I1001 05:43:08.661414 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fcg2n" podStartSLOduration=2.127403231 podStartE2EDuration="4.661394161s" podCreationTimestamp="2025-10-01 05:43:04 +0000 UTC" firstStartedPulling="2025-10-01 05:43:05.596275735 +0000 UTC m=+834.534254389" lastFinishedPulling="2025-10-01 05:43:08.130266695 +0000 UTC m=+837.068245319" observedRunningTime="2025-10-01 05:43:08.659823446 +0000 UTC m=+837.597802050" watchObservedRunningTime="2025-10-01 05:43:08.661394161 +0000 UTC m=+837.599372765" Oct 01 05:43:09 crc kubenswrapper[4661]: I1001 05:43:09.640856 4661 generic.go:334] "Generic (PLEG): container finished" podID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerID="5965a829fde3dabb8486b039e3c92783a56716e62f02c21c6f1d736c36d16c95" exitCode=0 Oct 01 05:43:09 crc kubenswrapper[4661]: I1001 05:43:09.640905 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerDied","Data":"5965a829fde3dabb8486b039e3c92783a56716e62f02c21c6f1d736c36d16c95"} Oct 01 05:43:10 crc kubenswrapper[4661]: I1001 05:43:10.657862 4661 generic.go:334] "Generic (PLEG): container finished" podID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerID="82d02df2eeb9c429885bc2440a64b15363e100832d1d365943d6e1183ab290e8" exitCode=0 Oct 01 05:43:10 crc kubenswrapper[4661]: I1001 05:43:10.657957 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerDied","Data":"82d02df2eeb9c429885bc2440a64b15363e100832d1d365943d6e1183ab290e8"} Oct 01 05:43:11 crc kubenswrapper[4661]: I1001 05:43:11.899795 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:11 crc kubenswrapper[4661]: I1001 05:43:11.901928 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:11 crc kubenswrapper[4661]: I1001 05:43:11.902023 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.025130 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.025349 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2446t\" (UniqueName: \"kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.025429 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.072587 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.126425 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.126658 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.126792 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2446t\" (UniqueName: \"kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.127386 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.127428 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.149341 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2446t\" (UniqueName: \"kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t\") pod \"redhat-operators-ws7x6\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.228033 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle\") pod \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.228097 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jlng\" (UniqueName: \"kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng\") pod \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.228158 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util\") pod \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\" (UID: \"e97f5f22-3f8d-416c-a584-6d49ccb28f70\") " Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.229091 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle" (OuterVolumeSpecName: "bundle") pod "e97f5f22-3f8d-416c-a584-6d49ccb28f70" (UID: "e97f5f22-3f8d-416c-a584-6d49ccb28f70"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.233279 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng" (OuterVolumeSpecName: "kube-api-access-2jlng") pod "e97f5f22-3f8d-416c-a584-6d49ccb28f70" (UID: "e97f5f22-3f8d-416c-a584-6d49ccb28f70"). InnerVolumeSpecName "kube-api-access-2jlng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.237685 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.247506 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util" (OuterVolumeSpecName: "util") pod "e97f5f22-3f8d-416c-a584-6d49ccb28f70" (UID: "e97f5f22-3f8d-416c-a584-6d49ccb28f70"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.329547 4661 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.329800 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jlng\" (UniqueName: \"kubernetes.io/projected/e97f5f22-3f8d-416c-a584-6d49ccb28f70-kube-api-access-2jlng\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.329815 4661 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e97f5f22-3f8d-416c-a584-6d49ccb28f70-util\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.675196 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" event={"ID":"e97f5f22-3f8d-416c-a584-6d49ccb28f70","Type":"ContainerDied","Data":"4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d"} Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.675245 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ae41c4a3ec30738dd545147e92610f4ee31bf61a170e5385df3d0529d9aa74d" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.675329 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd" Oct 01 05:43:12 crc kubenswrapper[4661]: I1001 05:43:12.680574 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:12 crc kubenswrapper[4661]: W1001 05:43:12.689033 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9506d65d_858a_4eee_8b11_340a83061bb9.slice/crio-059910ac1938da8d0d09f504088eecf361138893520729341fcaf2b78e0a974c WatchSource:0}: Error finding container 059910ac1938da8d0d09f504088eecf361138893520729341fcaf2b78e0a974c: Status 404 returned error can't find the container with id 059910ac1938da8d0d09f504088eecf361138893520729341fcaf2b78e0a974c Oct 01 05:43:13 crc kubenswrapper[4661]: I1001 05:43:13.690482 4661 generic.go:334] "Generic (PLEG): container finished" podID="9506d65d-858a-4eee-8b11-340a83061bb9" containerID="fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d" exitCode=0 Oct 01 05:43:13 crc kubenswrapper[4661]: I1001 05:43:13.690811 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerDied","Data":"fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d"} Oct 01 05:43:13 crc kubenswrapper[4661]: I1001 05:43:13.691016 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerStarted","Data":"059910ac1938da8d0d09f504088eecf361138893520729341fcaf2b78e0a974c"} Oct 01 05:43:14 crc kubenswrapper[4661]: I1001 05:43:14.414079 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:14 crc kubenswrapper[4661]: I1001 05:43:14.414693 4661 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:14 crc kubenswrapper[4661]: I1001 05:43:14.505190 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:14 crc kubenswrapper[4661]: I1001 05:43:14.710517 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerStarted","Data":"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49"} Oct 01 05:43:14 crc kubenswrapper[4661]: I1001 05:43:14.778717 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:15 crc kubenswrapper[4661]: I1001 05:43:15.722410 4661 generic.go:334] "Generic (PLEG): container finished" podID="9506d65d-858a-4eee-8b11-340a83061bb9" containerID="80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49" exitCode=0 Oct 01 05:43:15 crc kubenswrapper[4661]: I1001 05:43:15.722503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerDied","Data":"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49"} Oct 01 05:43:16 crc kubenswrapper[4661]: I1001 05:43:16.733345 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerStarted","Data":"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a"} Oct 01 05:43:16 crc kubenswrapper[4661]: I1001 05:43:16.775053 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ws7x6" podStartSLOduration=3.27467633 podStartE2EDuration="5.775034046s" podCreationTimestamp="2025-10-01 05:43:11 +0000 UTC" firstStartedPulling="2025-10-01 05:43:13.695124888 +0000 UTC m=+842.633103542" lastFinishedPulling="2025-10-01 05:43:16.195482614 +0000 UTC m=+845.133461258" observedRunningTime="2025-10-01 05:43:16.769435301 +0000 UTC m=+845.707413925" watchObservedRunningTime="2025-10-01 05:43:16.775034046 +0000 UTC m=+845.713012670" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.951014 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff"] Oct 01 05:43:17 crc kubenswrapper[4661]: E1001 05:43:17.951675 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="extract" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.951696 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="extract" Oct 01 05:43:17 crc kubenswrapper[4661]: E1001 05:43:17.951719 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="util" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.951726 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="util" Oct 01 05:43:17 crc kubenswrapper[4661]: E1001 05:43:17.951737 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="pull" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.951745 4661 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="pull" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.951892 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e97f5f22-3f8d-416c-a584-6d49ccb28f70" containerName="extract" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.952701 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:17 crc kubenswrapper[4661]: I1001 05:43:17.955481 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-pdtt8" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.012351 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvlvj\" (UniqueName: \"kubernetes.io/projected/12436698-76a2-4877-8d43-8af3c769ec32-kube-api-access-bvlvj\") pod \"openstack-operator-controller-operator-8d79f897-dcwff\" (UID: \"12436698-76a2-4877-8d43-8af3c769ec32\") " pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.022847 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff"] Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.046171 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.046392 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fcg2n" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="registry-server" containerID="cri-o://f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31" gracePeriod=2 Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.113742 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvlvj\" (UniqueName: \"kubernetes.io/projected/12436698-76a2-4877-8d43-8af3c769ec32-kube-api-access-bvlvj\") pod \"openstack-operator-controller-operator-8d79f897-dcwff\" (UID: \"12436698-76a2-4877-8d43-8af3c769ec32\") " pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.133827 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvlvj\" (UniqueName: \"kubernetes.io/projected/12436698-76a2-4877-8d43-8af3c769ec32-kube-api-access-bvlvj\") pod \"openstack-operator-controller-operator-8d79f897-dcwff\" (UID: \"12436698-76a2-4877-8d43-8af3c769ec32\") " pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.270431 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.513961 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.619829 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities\") pod \"79065633-b245-4e74-9940-669c46fec506\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.619941 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmfk2\" (UniqueName: \"kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2\") pod \"79065633-b245-4e74-9940-669c46fec506\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.620016 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content\") pod \"79065633-b245-4e74-9940-669c46fec506\" (UID: \"79065633-b245-4e74-9940-669c46fec506\") " Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.621210 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities" (OuterVolumeSpecName: "utilities") pod "79065633-b245-4e74-9940-669c46fec506" (UID: "79065633-b245-4e74-9940-669c46fec506"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.624664 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2" (OuterVolumeSpecName: "kube-api-access-fmfk2") pod "79065633-b245-4e74-9940-669c46fec506" (UID: "79065633-b245-4e74-9940-669c46fec506"). InnerVolumeSpecName "kube-api-access-fmfk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.658002 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79065633-b245-4e74-9940-669c46fec506" (UID: "79065633-b245-4e74-9940-669c46fec506"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.721962 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.722004 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79065633-b245-4e74-9940-669c46fec506-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.722017 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmfk2\" (UniqueName: \"kubernetes.io/projected/79065633-b245-4e74-9940-669c46fec506-kube-api-access-fmfk2\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.731361 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff"] Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.755835 4661 generic.go:334] "Generic (PLEG): container finished" podID="79065633-b245-4e74-9940-669c46fec506" containerID="f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31" exitCode=0 Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.755849 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerDied","Data":"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31"} Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.755932 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fcg2n" event={"ID":"79065633-b245-4e74-9940-669c46fec506","Type":"ContainerDied","Data":"9d198496e02a24720fc5c58c824c8b33abf5473d5c05431b33fdeef3227e6715"} Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.755991 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fcg2n" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.756012 4661 scope.go:117] "RemoveContainer" containerID="f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.777748 4661 scope.go:117] "RemoveContainer" containerID="8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.789102 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.793220 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fcg2n"] Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.809757 4661 scope.go:117] "RemoveContainer" containerID="75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.832176 4661 scope.go:117] "RemoveContainer" containerID="f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31" Oct 01 05:43:18 crc kubenswrapper[4661]: E1001 05:43:18.832622 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31\": container with ID starting with f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31 not found: ID does not exist" containerID="f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.832680 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31"} err="failed to get container status \"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31\": rpc error: code = NotFound desc = could not find container \"f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31\": container with ID starting with f97ad97b891c6b5c5bd35e4898d4d486b70866157c65e5d3f3ec53df79e8ee31 not found: ID does not exist" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.832708 4661 scope.go:117] "RemoveContainer" containerID="8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b" Oct 01 05:43:18 crc kubenswrapper[4661]: E1001 05:43:18.833186 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b\": container with ID starting with 8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b not found: ID does not exist" containerID="8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.833245 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b"} err="failed to get container status \"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b\": rpc error: code = NotFound desc = could not find container \"8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b\": container with ID starting with 8e150a629cff1afe2ef402722192508a8f9fb84c51f7554bc07b1f295e921c8b not found: ID does not exist" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.833272 4661 scope.go:117] "RemoveContainer" 
containerID="75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d" Oct 01 05:43:18 crc kubenswrapper[4661]: E1001 05:43:18.833567 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d\": container with ID starting with 75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d not found: ID does not exist" containerID="75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d" Oct 01 05:43:18 crc kubenswrapper[4661]: I1001 05:43:18.833611 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d"} err="failed to get container status \"75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d\": rpc error: code = NotFound desc = could not find container \"75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d\": container with ID starting with 75aa477ba105bce57b756924c77afeeb19256678831c480775164f92c0f5f73d not found: ID does not exist" Oct 01 05:43:19 crc kubenswrapper[4661]: I1001 05:43:19.775477 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79065633-b245-4e74-9940-669c46fec506" path="/var/lib/kubelet/pods/79065633-b245-4e74-9940-669c46fec506/volumes" Oct 01 05:43:19 crc kubenswrapper[4661]: I1001 05:43:19.776391 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" event={"ID":"12436698-76a2-4877-8d43-8af3c769ec32","Type":"ContainerStarted","Data":"1485985d177f35b646f5fa8584265cde7211f1b1c31b1a9e97e727436dc88449"} Oct 01 05:43:22 crc kubenswrapper[4661]: I1001 05:43:22.238655 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:22 crc kubenswrapper[4661]: I1001 05:43:22.239099 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:22 crc kubenswrapper[4661]: I1001 05:43:22.295169 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:22 crc kubenswrapper[4661]: I1001 05:43:22.879751 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:24 crc kubenswrapper[4661]: I1001 05:43:24.823978 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" event={"ID":"12436698-76a2-4877-8d43-8af3c769ec32","Type":"ContainerStarted","Data":"9a9993df8fc2a2bf74eb612716766fcec06e6bc299a478cea9cae9e6336d1fa4"} Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.048925 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.049251 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ws7x6" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="registry-server" containerID="cri-o://0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a" gracePeriod=2 Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.633475 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.728603 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content\") pod \"9506d65d-858a-4eee-8b11-340a83061bb9\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.728710 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2446t\" (UniqueName: \"kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t\") pod \"9506d65d-858a-4eee-8b11-340a83061bb9\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.728807 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities\") pod \"9506d65d-858a-4eee-8b11-340a83061bb9\" (UID: \"9506d65d-858a-4eee-8b11-340a83061bb9\") " Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.730618 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities" (OuterVolumeSpecName: "utilities") pod "9506d65d-858a-4eee-8b11-340a83061bb9" (UID: "9506d65d-858a-4eee-8b11-340a83061bb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.735756 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t" (OuterVolumeSpecName: "kube-api-access-2446t") pod "9506d65d-858a-4eee-8b11-340a83061bb9" (UID: "9506d65d-858a-4eee-8b11-340a83061bb9"). InnerVolumeSpecName "kube-api-access-2446t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.830722 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2446t\" (UniqueName: \"kubernetes.io/projected/9506d65d-858a-4eee-8b11-340a83061bb9-kube-api-access-2446t\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.830763 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.834900 4661 generic.go:334] "Generic (PLEG): container finished" podID="9506d65d-858a-4eee-8b11-340a83061bb9" containerID="0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a" exitCode=0 Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.834948 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerDied","Data":"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a"} Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.834961 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ws7x6" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.834982 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ws7x6" event={"ID":"9506d65d-858a-4eee-8b11-340a83061bb9","Type":"ContainerDied","Data":"059910ac1938da8d0d09f504088eecf361138893520729341fcaf2b78e0a974c"} Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.835006 4661 scope.go:117] "RemoveContainer" containerID="0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.844780 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9506d65d-858a-4eee-8b11-340a83061bb9" (UID: "9506d65d-858a-4eee-8b11-340a83061bb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:43:25 crc kubenswrapper[4661]: I1001 05:43:25.932574 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9506d65d-858a-4eee-8b11-340a83061bb9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.172603 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.181100 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ws7x6"] Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.637306 4661 scope.go:117] "RemoveContainer" containerID="80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.690654 4661 scope.go:117] "RemoveContainer" containerID="fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.718486 4661 scope.go:117] "RemoveContainer" containerID="0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a" Oct 01 05:43:26 crc kubenswrapper[4661]: E1001 05:43:26.719068 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a\": container with ID starting with 0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a not found: ID does not exist" containerID="0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.719106 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a"} err="failed to get container status \"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a\": rpc error: code = NotFound desc = could not find container \"0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a\": container with ID starting with 0382f123602447b247a442368b8a3498d7f71e28b9f2ca69b3dfd71b56c2e68a not found: ID does not exist" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.719133 4661 scope.go:117] "RemoveContainer" containerID="80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49" Oct 01 05:43:26 crc kubenswrapper[4661]: E1001 05:43:26.719727 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49\": container with ID starting with 80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49 not found: ID does not exist" containerID="80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.719791 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49"} err="failed to get container status \"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49\": rpc error: code = NotFound desc = could not find container \"80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49\": container with ID starting with 80bdfbfaff1cf9ec14082037390140f7d1d17ad16f3199a302608dbd5b033e49 not found: ID does not exist" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.719833 4661 scope.go:117] "RemoveContainer" containerID="fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d" Oct 01 05:43:26 crc kubenswrapper[4661]: E1001 05:43:26.720695 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d\": container with ID starting with fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d not found: ID does not exist" containerID="fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d" Oct 01 05:43:26 crc kubenswrapper[4661]: I1001 05:43:26.720738 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d"} err="failed to get container status \"fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d\": rpc error: code = NotFound desc = could not find container \"fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d\": container with ID starting with fed5cf3fed92ffce6301b662b5ae988a638f65d79f4265bfc39aefc9ad5f2d6d not found: ID does not exist" Oct 01 05:43:27 crc kubenswrapper[4661]: I1001 05:43:27.769419 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" path="/var/lib/kubelet/pods/9506d65d-858a-4eee-8b11-340a83061bb9/volumes" Oct 01 05:43:27 crc kubenswrapper[4661]: I1001 05:43:27.855270 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" event={"ID":"12436698-76a2-4877-8d43-8af3c769ec32","Type":"ContainerStarted","Data":"a22a336cc72bb4b7a7c3ecc5ed69b29bf9492a8c78f04f42650bdf6cabb5278e"} Oct 01 05:43:27 crc kubenswrapper[4661]: I1001 05:43:27.855591 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:43:27 crc kubenswrapper[4661]: I1001 05:43:27.911159 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" podStartSLOduration=2.937834391 podStartE2EDuration="10.911132593s" podCreationTimestamp="2025-10-01 05:43:17 +0000 UTC" firstStartedPulling="2025-10-01 05:43:18.748700268 +0000 UTC m=+847.686678922" lastFinishedPulling="2025-10-01 05:43:26.72199847 +0000 UTC m=+855.659977124" observedRunningTime="2025-10-01 05:43:27.906126595 +0000 UTC m=+856.844105219" 
watchObservedRunningTime="2025-10-01 05:43:27.911132593 +0000 UTC m=+856.849111247" Oct 01 05:43:28 crc kubenswrapper[4661]: I1001 05:43:28.273441 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-8d79f897-dcwff" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.309670 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.310458 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.384825 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-qslns"] Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.389880 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="extract-utilities" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.389907 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="extract-utilities" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.389932 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="extract-content" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.389939 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="extract-content" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.389956 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="extract-utilities" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.389962 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="extract-utilities" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.389976 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.389983 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.390038 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.390044 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.390059 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="extract-content" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.390065 4661 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="79065633-b245-4e74-9940-669c46fec506" containerName="extract-content" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.390336 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="79065633-b245-4e74-9940-669c46fec506" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.390352 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="9506d65d-858a-4eee-8b11-340a83061bb9" containerName="registry-server" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.392541 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.398491 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.399435 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-qjjsd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.404797 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-qslns"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.404931 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.406328 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.409759 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-2lll9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.427436 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.428349 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.433971 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2dfcv" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.437470 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.438839 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.440212 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-d4wgb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.456432 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.474275 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.480742 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.481706 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.482379 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.482776 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.486346 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-nrv2c" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.486519 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-svzd6" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.492599 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.509265 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.511966 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.512802 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.515554 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-rdgjw" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.521273 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.533695 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.536448 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.538162 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vrl7z" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.546974 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.553977 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drwhp\" (UniqueName: \"kubernetes.io/projected/29b41883-13c4-454f-a3d3-45aa0db29f82-kube-api-access-drwhp\") pod \"glance-operator-controller-manager-8bc4775b5-fmrmk\" (UID: \"29b41883-13c4-454f-a3d3-45aa0db29f82\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.554028 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvl78\" (UniqueName: \"kubernetes.io/projected/1e3e3612-5d8a-4db4-af00-94428fcb570e-kube-api-access-tvl78\") pod \"barbican-operator-controller-manager-f7f98cb69-p6lbb\" (UID: \"1e3e3612-5d8a-4db4-af00-94428fcb570e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.554069 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjxqm\" (UniqueName: \"kubernetes.io/projected/dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2-kube-api-access-kjxqm\") pod \"designate-operator-controller-manager-77fb7bcf5b-jszh9\" (UID: \"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.554124 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5m6c\" (UniqueName: \"kubernetes.io/projected/a23b3c29-b18c-4ea0-8723-41000d6a754b-kube-api-access-m5m6c\") pod \"cinder-operator-controller-manager-859cd486d-qslns\" (UID: \"a23b3c29-b18c-4ea0-8723-41000d6a754b\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.572991 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.574021 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.574753 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.579604 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.580230 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-pxn6z" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.602866 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.604229 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.609462 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-6vcxc" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.612589 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.613559 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.614866 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-xj8jl" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.629124 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.639020 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655197 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjxqm\" (UniqueName: \"kubernetes.io/projected/dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2-kube-api-access-kjxqm\") pod \"designate-operator-controller-manager-77fb7bcf5b-jszh9\" (UID: \"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655242 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmnnw\" (UniqueName: \"kubernetes.io/projected/50f20957-2408-4a65-a326-e3b76051b38b-kube-api-access-wmnnw\") pod \"ironic-operator-controller-manager-6f589bc7f7-7fjjd\" (UID: \"50f20957-2408-4a65-a326-e3b76051b38b\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655289 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6h7z\" (UniqueName: 
\"kubernetes.io/projected/12785d9c-9cdb-4c80-bc4b-ee398e655992-kube-api-access-b6h7z\") pod \"heat-operator-controller-manager-5b4fc86755-8fsv8\" (UID: \"12785d9c-9cdb-4c80-bc4b-ee398e655992\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655321 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5m6c\" (UniqueName: \"kubernetes.io/projected/a23b3c29-b18c-4ea0-8723-41000d6a754b-kube-api-access-m5m6c\") pod \"cinder-operator-controller-manager-859cd486d-qslns\" (UID: \"a23b3c29-b18c-4ea0-8723-41000d6a754b\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655346 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drwhp\" (UniqueName: \"kubernetes.io/projected/29b41883-13c4-454f-a3d3-45aa0db29f82-kube-api-access-drwhp\") pod \"glance-operator-controller-manager-8bc4775b5-fmrmk\" (UID: \"29b41883-13c4-454f-a3d3-45aa0db29f82\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655369 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvl78\" (UniqueName: \"kubernetes.io/projected/1e3e3612-5d8a-4db4-af00-94428fcb570e-kube-api-access-tvl78\") pod \"barbican-operator-controller-manager-f7f98cb69-p6lbb\" (UID: \"1e3e3612-5d8a-4db4-af00-94428fcb570e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655387 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655405 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gqgg\" (UniqueName: \"kubernetes.io/projected/0e131827-21a2-4464-80d3-7528c1d8c52a-kube-api-access-6gqgg\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.655425 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b6kn\" (UniqueName: \"kubernetes.io/projected/001f8e02-2d35-4c68-88ae-4d732588213c-kube-api-access-9b6kn\") pod \"horizon-operator-controller-manager-679b4759bb-7w27g\" (UID: \"001f8e02-2d35-4c68-88ae-4d732588213c\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.659050 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.662338 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.665300 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ljcnn" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.682143 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.683214 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.686842 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.686902 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-xbl96" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.694333 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.701504 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drwhp\" (UniqueName: \"kubernetes.io/projected/29b41883-13c4-454f-a3d3-45aa0db29f82-kube-api-access-drwhp\") pod \"glance-operator-controller-manager-8bc4775b5-fmrmk\" (UID: \"29b41883-13c4-454f-a3d3-45aa0db29f82\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.701495 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvl78\" (UniqueName: \"kubernetes.io/projected/1e3e3612-5d8a-4db4-af00-94428fcb570e-kube-api-access-tvl78\") pod \"barbican-operator-controller-manager-f7f98cb69-p6lbb\" (UID: \"1e3e3612-5d8a-4db4-af00-94428fcb570e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.701522 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjxqm\" (UniqueName: \"kubernetes.io/projected/dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2-kube-api-access-kjxqm\") pod \"designate-operator-controller-manager-77fb7bcf5b-jszh9\" (UID: \"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.702179 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.703233 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.706286 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-hdknz" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.706410 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.711487 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5m6c\" (UniqueName: \"kubernetes.io/projected/a23b3c29-b18c-4ea0-8723-41000d6a754b-kube-api-access-m5m6c\") pod \"cinder-operator-controller-manager-859cd486d-qslns\" (UID: \"a23b3c29-b18c-4ea0-8723-41000d6a754b\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.722740 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.726862 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.731081 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.733520 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-qb56t" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.733588 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.744827 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.753829 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758126 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmnnw\" (UniqueName: \"kubernetes.io/projected/50f20957-2408-4a65-a326-e3b76051b38b-kube-api-access-wmnnw\") pod \"ironic-operator-controller-manager-6f589bc7f7-7fjjd\" (UID: \"50f20957-2408-4a65-a326-e3b76051b38b\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758178 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czhpd\" (UniqueName: \"kubernetes.io/projected/9d7506fd-1133-4927-872d-c68c525cba62-kube-api-access-czhpd\") pod \"mariadb-operator-controller-manager-67bf5bb885-nwsxg\" (UID: \"9d7506fd-1133-4927-872d-c68c525cba62\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758200 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9b52\" (UniqueName: \"kubernetes.io/projected/c0601966-5144-438f-a862-3f397e7064a4-kube-api-access-q9b52\") pod \"keystone-operator-controller-manager-59d7dc95cf-6l9mf\" (UID: \"c0601966-5144-438f-a862-3f397e7064a4\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758229 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6h7z\" (UniqueName: \"kubernetes.io/projected/12785d9c-9cdb-4c80-bc4b-ee398e655992-kube-api-access-b6h7z\") pod \"heat-operator-controller-manager-5b4fc86755-8fsv8\" (UID: \"12785d9c-9cdb-4c80-bc4b-ee398e655992\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758265 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln4cg\" (UniqueName: \"kubernetes.io/projected/34d73c0f-65b8-4718-8409-849ac307168f-kube-api-access-ln4cg\") pod \"nova-operator-controller-manager-79f9fc9fd8-68scz\" (UID: \"34d73c0f-65b8-4718-8409-849ac307168f\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758296 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758319 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gqgg\" (UniqueName: \"kubernetes.io/projected/0e131827-21a2-4464-80d3-7528c1d8c52a-kube-api-access-6gqgg\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758343 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfmg4\" (UniqueName: 
\"kubernetes.io/projected/f020bcbc-c80b-4465-9733-204a86325234-kube-api-access-vfmg4\") pod \"manila-operator-controller-manager-b7cf8cb5f-dxp6r\" (UID: \"f020bcbc-c80b-4465-9733-204a86325234\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.758364 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b6kn\" (UniqueName: \"kubernetes.io/projected/001f8e02-2d35-4c68-88ae-4d732588213c-kube-api-access-9b6kn\") pod \"horizon-operator-controller-manager-679b4759bb-7w27g\" (UID: \"001f8e02-2d35-4c68-88ae-4d732588213c\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.759496 4661 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.759582 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert podName:0e131827-21a2-4464-80d3-7528c1d8c52a nodeName:}" failed. No retries permitted until 2025-10-01 05:44:05.259552176 +0000 UTC m=+894.197530790 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert") pod "infra-operator-controller-manager-5c8fdc4d5c-9msgq" (UID: "0e131827-21a2-4464-80d3-7528c1d8c52a") : secret "infra-operator-webhook-server-cert" not found Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.770866 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.782404 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.783573 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.786261 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.787573 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.790197 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-5z297" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.790361 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-gp9h4" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.814243 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gqgg\" (UniqueName: \"kubernetes.io/projected/0e131827-21a2-4464-80d3-7528c1d8c52a-kube-api-access-6gqgg\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.822069 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b6kn\" (UniqueName: \"kubernetes.io/projected/001f8e02-2d35-4c68-88ae-4d732588213c-kube-api-access-9b6kn\") pod \"horizon-operator-controller-manager-679b4759bb-7w27g\" (UID: \"001f8e02-2d35-4c68-88ae-4d732588213c\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.832456 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmnnw\" (UniqueName: \"kubernetes.io/projected/50f20957-2408-4a65-a326-e3b76051b38b-kube-api-access-wmnnw\") pod \"ironic-operator-controller-manager-6f589bc7f7-7fjjd\" (UID: \"50f20957-2408-4a65-a326-e3b76051b38b\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.836882 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.841375 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6h7z\" (UniqueName: \"kubernetes.io/projected/12785d9c-9cdb-4c80-bc4b-ee398e655992-kube-api-access-b6h7z\") pod \"heat-operator-controller-manager-5b4fc86755-8fsv8\" (UID: \"12785d9c-9cdb-4c80-bc4b-ee398e655992\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.852752 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.854136 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860363 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln4cg\" (UniqueName: \"kubernetes.io/projected/34d73c0f-65b8-4718-8409-849ac307168f-kube-api-access-ln4cg\") pod \"nova-operator-controller-manager-79f9fc9fd8-68scz\" (UID: \"34d73c0f-65b8-4718-8409-849ac307168f\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860538 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfmg4\" (UniqueName: \"kubernetes.io/projected/f020bcbc-c80b-4465-9733-204a86325234-kube-api-access-vfmg4\") pod \"manila-operator-controller-manager-b7cf8cb5f-dxp6r\" (UID: \"f020bcbc-c80b-4465-9733-204a86325234\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860677 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860759 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mfkp\" (UniqueName: \"kubernetes.io/projected/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-kube-api-access-9mfkp\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860844 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9zp8\" (UniqueName: \"kubernetes.io/projected/5fbac0a5-84db-4228-9c2c-93d8d551044d-kube-api-access-j9zp8\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-f248r\" (UID: \"5fbac0a5-84db-4228-9c2c-93d8d551044d\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.860981 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8j94\" (UniqueName: \"kubernetes.io/projected/2e8d1048-762f-4737-82c2-c6244072bf9d-kube-api-access-f8j94\") pod \"neutron-operator-controller-manager-6b96467f46-8kwhx\" (UID: \"2e8d1048-762f-4737-82c2-c6244072bf9d\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.861104 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czhpd\" (UniqueName: \"kubernetes.io/projected/9d7506fd-1133-4927-872d-c68c525cba62-kube-api-access-czhpd\") pod \"mariadb-operator-controller-manager-67bf5bb885-nwsxg\" (UID: \"9d7506fd-1133-4927-872d-c68c525cba62\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.861214 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-q9b52\" (UniqueName: \"kubernetes.io/projected/c0601966-5144-438f-a862-3f397e7064a4-kube-api-access-q9b52\") pod \"keystone-operator-controller-manager-59d7dc95cf-6l9mf\" (UID: \"c0601966-5144-438f-a862-3f397e7064a4\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.862144 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-vlmkd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.866587 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.887651 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.893555 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.897572 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfmg4\" (UniqueName: \"kubernetes.io/projected/f020bcbc-c80b-4465-9733-204a86325234-kube-api-access-vfmg4\") pod \"manila-operator-controller-manager-b7cf8cb5f-dxp6r\" (UID: \"f020bcbc-c80b-4465-9733-204a86325234\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.898990 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czhpd\" (UniqueName: \"kubernetes.io/projected/9d7506fd-1133-4927-872d-c68c525cba62-kube-api-access-czhpd\") pod \"mariadb-operator-controller-manager-67bf5bb885-nwsxg\" (UID: \"9d7506fd-1133-4927-872d-c68c525cba62\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.900295 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9b52\" (UniqueName: \"kubernetes.io/projected/c0601966-5144-438f-a862-3f397e7064a4-kube-api-access-q9b52\") pod \"keystone-operator-controller-manager-59d7dc95cf-6l9mf\" (UID: \"c0601966-5144-438f-a862-3f397e7064a4\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.910120 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.910578 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln4cg\" (UniqueName: \"kubernetes.io/projected/34d73c0f-65b8-4718-8409-849ac307168f-kube-api-access-ln4cg\") pod \"nova-operator-controller-manager-79f9fc9fd8-68scz\" (UID: \"34d73c0f-65b8-4718-8409-849ac307168f\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.922766 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.923027 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.941043 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.941972 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.943018 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.952701 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.955505 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-ndr8j" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962462 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8j94\" (UniqueName: \"kubernetes.io/projected/2e8d1048-762f-4737-82c2-c6244072bf9d-kube-api-access-f8j94\") pod \"neutron-operator-controller-manager-6b96467f46-8kwhx\" (UID: \"2e8d1048-762f-4737-82c2-c6244072bf9d\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962549 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg2ts\" (UniqueName: \"kubernetes.io/projected/3ad43452-d673-42a7-8495-887b5e93cacb-kube-api-access-mg2ts\") pod \"placement-operator-controller-manager-598c4c8547-xs8zb\" (UID: \"3ad43452-d673-42a7-8495-887b5e93cacb\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962591 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5qmb\" (UniqueName: \"kubernetes.io/projected/a9e55ad4-d1a5-4830-96db-02c95384650c-kube-api-access-w5qmb\") pod \"swift-operator-controller-manager-657c6b68c7-cv29g\" (UID: \"a9e55ad4-d1a5-4830-96db-02c95384650c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962616 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnjm7\" (UniqueName: \"kubernetes.io/projected/01c6b4d6-55f6-4837-af54-9eb764262d03-kube-api-access-pnjm7\") pod \"ovn-operator-controller-manager-84c745747f-lhhfj\" (UID: \"01c6b4d6-55f6-4837-af54-9eb764262d03\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962656 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962672 4661 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mfkp\" (UniqueName: \"kubernetes.io/projected/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-kube-api-access-9mfkp\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.962689 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9zp8\" (UniqueName: \"kubernetes.io/projected/5fbac0a5-84db-4228-9c2c-93d8d551044d-kube-api-access-j9zp8\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-f248r\" (UID: \"5fbac0a5-84db-4228-9c2c-93d8d551044d\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.964185 4661 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 05:44:04 crc kubenswrapper[4661]: E1001 05:44:04.964239 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert podName:8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb nodeName:}" failed. No retries permitted until 2025-10-01 05:44:05.464223684 +0000 UTC m=+894.402202298 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert") pod "openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" (UID: "8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.967322 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d"] Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.968479 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:04 crc kubenswrapper[4661]: I1001 05:44:04.971996 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-4mg9r" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.002699 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.004913 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.008567 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mfkp\" (UniqueName: \"kubernetes.io/projected/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-kube-api-access-9mfkp\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.011210 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9zp8\" (UniqueName: \"kubernetes.io/projected/5fbac0a5-84db-4228-9c2c-93d8d551044d-kube-api-access-j9zp8\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-f248r\" (UID: \"5fbac0a5-84db-4228-9c2c-93d8d551044d\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.015714 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8j94\" (UniqueName: \"kubernetes.io/projected/2e8d1048-762f-4737-82c2-c6244072bf9d-kube-api-access-f8j94\") pod \"neutron-operator-controller-manager-6b96467f46-8kwhx\" (UID: \"2e8d1048-762f-4737-82c2-c6244072bf9d\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.040699 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.041842 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.056382 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-48fpq" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.064995 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.065421 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.067271 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56dw4\" (UniqueName: \"kubernetes.io/projected/5826e57e-36e6-43e0-8141-7e6e3ae936a6-kube-api-access-56dw4\") pod \"telemetry-operator-controller-manager-cb66d6b59-cd2rk\" (UID: \"5826e57e-36e6-43e0-8141-7e6e3ae936a6\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.067314 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtfww\" (UniqueName: \"kubernetes.io/projected/838837a9-4076-41ba-91e4-44055ce7c97a-kube-api-access-dtfww\") pod \"test-operator-controller-manager-6bb97fcf96-jsj4d\" (UID: \"838837a9-4076-41ba-91e4-44055ce7c97a\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.067356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg2ts\" (UniqueName: \"kubernetes.io/projected/3ad43452-d673-42a7-8495-887b5e93cacb-kube-api-access-mg2ts\") pod \"placement-operator-controller-manager-598c4c8547-xs8zb\" (UID: \"3ad43452-d673-42a7-8495-887b5e93cacb\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.067402 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5qmb\" (UniqueName: \"kubernetes.io/projected/a9e55ad4-d1a5-4830-96db-02c95384650c-kube-api-access-w5qmb\") pod \"swift-operator-controller-manager-657c6b68c7-cv29g\" (UID: \"a9e55ad4-d1a5-4830-96db-02c95384650c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.067441 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnjm7\" (UniqueName: \"kubernetes.io/projected/01c6b4d6-55f6-4837-af54-9eb764262d03-kube-api-access-pnjm7\") pod \"ovn-operator-controller-manager-84c745747f-lhhfj\" (UID: \"01c6b4d6-55f6-4837-af54-9eb764262d03\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.093781 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnjm7\" (UniqueName: \"kubernetes.io/projected/01c6b4d6-55f6-4837-af54-9eb764262d03-kube-api-access-pnjm7\") pod \"ovn-operator-controller-manager-84c745747f-lhhfj\" (UID: \"01c6b4d6-55f6-4837-af54-9eb764262d03\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.096470 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg2ts\" (UniqueName: \"kubernetes.io/projected/3ad43452-d673-42a7-8495-887b5e93cacb-kube-api-access-mg2ts\") pod \"placement-operator-controller-manager-598c4c8547-xs8zb\" (UID: \"3ad43452-d673-42a7-8495-887b5e93cacb\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.109455 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5qmb\" (UniqueName: 
\"kubernetes.io/projected/a9e55ad4-d1a5-4830-96db-02c95384650c-kube-api-access-w5qmb\") pod \"swift-operator-controller-manager-657c6b68c7-cv29g\" (UID: \"a9e55ad4-d1a5-4830-96db-02c95384650c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.126924 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.129259 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.130464 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.133032 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.136982 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-cclp7" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.172421 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56dw4\" (UniqueName: \"kubernetes.io/projected/5826e57e-36e6-43e0-8141-7e6e3ae936a6-kube-api-access-56dw4\") pod \"telemetry-operator-controller-manager-cb66d6b59-cd2rk\" (UID: \"5826e57e-36e6-43e0-8141-7e6e3ae936a6\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.172464 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtfww\" (UniqueName: \"kubernetes.io/projected/838837a9-4076-41ba-91e4-44055ce7c97a-kube-api-access-dtfww\") pod \"test-operator-controller-manager-6bb97fcf96-jsj4d\" (UID: \"838837a9-4076-41ba-91e4-44055ce7c97a\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.173440 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.182719 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.185490 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fl8h\" (UniqueName: \"kubernetes.io/projected/1789dd27-2b5e-46e6-9260-affd4daf86cb-kube-api-access-2fl8h\") pod \"watcher-operator-controller-manager-75756dd4d9-2hjs8\" (UID: \"1789dd27-2b5e-46e6-9260-affd4daf86cb\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.192203 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.201200 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtfww\" (UniqueName: \"kubernetes.io/projected/838837a9-4076-41ba-91e4-44055ce7c97a-kube-api-access-dtfww\") pod \"test-operator-controller-manager-6bb97fcf96-jsj4d\" (UID: \"838837a9-4076-41ba-91e4-44055ce7c97a\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.208815 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56dw4\" (UniqueName: \"kubernetes.io/projected/5826e57e-36e6-43e0-8141-7e6e3ae936a6-kube-api-access-56dw4\") pod \"telemetry-operator-controller-manager-cb66d6b59-cd2rk\" (UID: \"5826e57e-36e6-43e0-8141-7e6e3ae936a6\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.208977 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.220496 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.242208 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.252689 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.253648 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.256577 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.273076 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-r7ngm" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.286788 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.286874 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.286901 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/68f9b331-6beb-4cda-884c-326180cb52c8-kube-api-access-sv4rn\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.286926 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fl8h\" (UniqueName: \"kubernetes.io/projected/1789dd27-2b5e-46e6-9260-affd4daf86cb-kube-api-access-2fl8h\") pod \"watcher-operator-controller-manager-75756dd4d9-2hjs8\" (UID: \"1789dd27-2b5e-46e6-9260-affd4daf86cb\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.299779 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e131827-21a2-4464-80d3-7528c1d8c52a-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-9msgq\" (UID: \"0e131827-21a2-4464-80d3-7528c1d8c52a\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.325383 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fl8h\" (UniqueName: \"kubernetes.io/projected/1789dd27-2b5e-46e6-9260-affd4daf86cb-kube-api-access-2fl8h\") pod \"watcher-operator-controller-manager-75756dd4d9-2hjs8\" (UID: \"1789dd27-2b5e-46e6-9260-affd4daf86cb\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.325698 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.357650 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.392282 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wswjg\" (UniqueName: \"kubernetes.io/projected/ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb-kube-api-access-wswjg\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-5n77c\" (UID: \"ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.392425 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.392553 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/68f9b331-6beb-4cda-884c-326180cb52c8-kube-api-access-sv4rn\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: E1001 05:44:05.393017 4661 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 01 05:44:05 crc kubenswrapper[4661]: E1001 05:44:05.393063 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert podName:68f9b331-6beb-4cda-884c-326180cb52c8 nodeName:}" failed. No retries permitted until 2025-10-01 05:44:05.893048919 +0000 UTC m=+894.831027533 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert") pod "openstack-operator-controller-manager-8648d97544-tlsf9" (UID: "68f9b331-6beb-4cda-884c-326180cb52c8") : secret "webhook-server-cert" not found Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.396025 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.415129 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/68f9b331-6beb-4cda-884c-326180cb52c8-kube-api-access-sv4rn\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.451069 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.499583 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wswjg\" (UniqueName: \"kubernetes.io/projected/ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb-kube-api-access-wswjg\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-5n77c\" (UID: \"ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.499642 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:05 crc kubenswrapper[4661]: E1001 05:44:05.499830 4661 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 05:44:05 crc kubenswrapper[4661]: E1001 05:44:05.499873 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert podName:8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb nodeName:}" failed. No retries permitted until 2025-10-01 05:44:06.499860432 +0000 UTC m=+895.437839046 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert") pod "openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" (UID: "8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.527438 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wswjg\" (UniqueName: \"kubernetes.io/projected/ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb-kube-api-access-wswjg\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-5n77c\" (UID: \"ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.650316 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.790314 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-qslns"] Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.837195 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.905904 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:05 crc kubenswrapper[4661]: I1001 05:44:05.929125 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68f9b331-6beb-4cda-884c-326180cb52c8-cert\") pod \"openstack-operator-controller-manager-8648d97544-tlsf9\" (UID: \"68f9b331-6beb-4cda-884c-326180cb52c8\") " pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.079453 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.128739 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.131566 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfdbd600_7bd1_43fa_a4cf_44f66f79e3e2.slice/crio-2908a5a626b3cbca86239d351e798b6e16a4246d1f09a01e757ec6390fd72a71 WatchSource:0}: Error finding container 2908a5a626b3cbca86239d351e798b6e16a4246d1f09a01e757ec6390fd72a71: Status 404 returned error can't find the container with id 2908a5a626b3cbca86239d351e798b6e16a4246d1f09a01e757ec6390fd72a71 Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.137566 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.148240 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.161082 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3e3612_5d8a_4db4_af00_94428fcb570e.slice/crio-d3b5d9479e8f10771b69d55f952e1db52da920c6fcd899b391f79bea1e7f71f7 WatchSource:0}: Error finding container d3b5d9479e8f10771b69d55f952e1db52da920c6fcd899b391f79bea1e7f71f7: Status 404 returned error can't find the container with id d3b5d9479e8f10771b69d55f952e1db52da920c6fcd899b391f79bea1e7f71f7 Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.204161 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" event={"ID":"a23b3c29-b18c-4ea0-8723-41000d6a754b","Type":"ContainerStarted","Data":"1364ab9b112ef7f26c2cc389b29b8e239132269e804c70372bcb8db9a1e3a867"} Oct 01 
05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.205104 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" event={"ID":"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2","Type":"ContainerStarted","Data":"2908a5a626b3cbca86239d351e798b6e16a4246d1f09a01e757ec6390fd72a71"} Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.205864 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" event={"ID":"1e3e3612-5d8a-4db4-af00-94428fcb570e","Type":"ContainerStarted","Data":"d3b5d9479e8f10771b69d55f952e1db52da920c6fcd899b391f79bea1e7f71f7"} Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.206587 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" event={"ID":"34d73c0f-65b8-4718-8409-849ac307168f","Type":"ContainerStarted","Data":"ae426355a764ed13a7155ebc8d23dea9fb69a2bf7aae75f9fc27cbbde86576f5"} Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.515752 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.520458 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb-cert\") pod \"openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k\" (UID: \"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.529619 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.544857 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.550893 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.563185 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.577108 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50f20957_2408_4a65_a326_e3b76051b38b.slice/crio-f58737d25e8e85e7af9996c33a48cf8737f4c3c49cdbe31b990007ea00e23528 WatchSource:0}: Error finding container f58737d25e8e85e7af9996c33a48cf8737f4c3c49cdbe31b990007ea00e23528: Status 404 returned error can't find the container with id f58737d25e8e85e7af9996c33a48cf8737f4c3c49cdbe31b990007ea00e23528 Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.577157 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.587493 4661 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.599170 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.605060 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.615723 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.623032 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.627735 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.635705 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.641711 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e8d1048_762f_4737_82c2_c6244072bf9d.slice/crio-30c2a96b8613fe444d912784a2b4f2395d822e7719684a3251e151c643fa4ddd WatchSource:0}: Error finding container 30c2a96b8613fe444d912784a2b4f2395d822e7719684a3251e151c643fa4ddd: Status 404 returned error can't find the container with id 30c2a96b8613fe444d912784a2b4f2395d822e7719684a3251e151c643fa4ddd Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.641923 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1789dd27_2b5e_46e6_9260_affd4daf86cb.slice/crio-0d9241aa21e6b26b90e373a2d9e568993d08049429ca382562d252264ca83d9f WatchSource:0}: Error finding container 0d9241aa21e6b26b90e373a2d9e568993d08049429ca382562d252264ca83d9f: Status 404 returned error can't find the container with id 0d9241aa21e6b26b90e373a2d9e568993d08049429ca382562d252264ca83d9f Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.646191 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29b41883_13c4_454f_a3d3_45aa0db29f82.slice/crio-1c223cae4a5bcee803c18543e7c62fb2e509cce254ebf933a49ab9e151826859 WatchSource:0}: Error finding container 1c223cae4a5bcee803c18543e7c62fb2e509cce254ebf933a49ab9e151826859: Status 404 returned error can't find the container with id 1c223cae4a5bcee803c18543e7c62fb2e509cce254ebf933a49ab9e151826859 Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.647097 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.648073 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01c6b4d6_55f6_4837_af54_9eb764262d03.slice/crio-1223d50e9e44c50ca50660bb931051b0b17d2b85decb6f320ec61b13736bf5ad WatchSource:0}: Error finding container 1223d50e9e44c50ca50660bb931051b0b17d2b85decb6f320ec61b13736bf5ad: Status 404 returned error can't find the 
container with id 1223d50e9e44c50ca50660bb931051b0b17d2b85decb6f320ec61b13736bf5ad Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.649875 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pnjm7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-84c745747f-lhhfj_openstack-operators(01c6b4d6-55f6-4837-af54-9eb764262d03): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.650295 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2fl8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75756dd4d9-2hjs8_openstack-operators(1789dd27-2b5e-46e6-9260-affd4daf86cb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.651192 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5826e57e_36e6_43e0_8141_7e6e3ae936a6.slice/crio-65dc6f784fb88a17f1a7fd238ddcd3ee6cc7d51c1e1cd3a70b0c0abc65c4b1f0 WatchSource:0}: Error finding container 65dc6f784fb88a17f1a7fd238ddcd3ee6cc7d51c1e1cd3a70b0c0abc65c4b1f0: Status 404 returned error can't find the container with id 65dc6f784fb88a17f1a7fd238ddcd3ee6cc7d51c1e1cd3a70b0c0abc65c4b1f0 Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.653926 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-56dw4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-cb66d6b59-cd2rk_openstack-operators(5826e57e-36e6-43e0-8141-7e6e3ae936a6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.656934 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w5qmb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-657c6b68c7-cv29g_openstack-operators(a9e55ad4-d1a5-4830-96db-02c95384650c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.657017 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.658323 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ad43452_d673_42a7_8495_887b5e93cacb.slice/crio-016a77aa4a5afb6a8745147833558cb262d3aa4be2e7f327c0cc1f3e65b169d8 WatchSource:0}: Error finding container 016a77aa4a5afb6a8745147833558cb262d3aa4be2e7f327c0cc1f3e65b169d8: Status 404 returned error can't find the container with id 016a77aa4a5afb6a8745147833558cb262d3aa4be2e7f327c0cc1f3e65b169d8 Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.663720 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9"] Oct 01 05:44:06 crc kubenswrapper[4661]: W1001 05:44:06.666602 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod838837a9_4076_41ba_91e4_44055ce7c97a.slice/crio-7b9c4e56b281cb476ea92a6bae222ada3cf9c4bb5a8a9236ce0564cb9b3eabda WatchSource:0}: Error finding container 7b9c4e56b281cb476ea92a6bae222ada3cf9c4bb5a8a9236ce0564cb9b3eabda: Status 404 returned error can't find the container with id 7b9c4e56b281cb476ea92a6bae222ada3cf9c4bb5a8a9236ce0564cb9b3eabda Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.666784 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mg2ts,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-598c4c8547-xs8zb_openstack-operators(3ad43452-d673-42a7-8495-887b5e93cacb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.667420 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wswjg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-5n77c_openstack-operators(ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.668743 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" podUID="ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.671610 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.678221 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d"] Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.681650 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c"] Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.686214 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q9b52,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-59d7dc95cf-6l9mf_openstack-operators(c0601966-5144-438f-a862-3f397e7064a4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.686701 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dtfww,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6bb97fcf96-jsj4d_openstack-operators(838837a9-4076-41ba-91e4-44055ce7c97a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 05:44:06 crc kubenswrapper[4661]: I1001 05:44:06.688626 4661 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:06 crc kubenswrapper[4661]: E1001 05:44:06.921115 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" podUID="01c6b4d6-55f6-4837-af54-9eb764262d03" Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.056020 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" podUID="a9e55ad4-d1a5-4830-96db-02c95384650c" Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.056697 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" podUID="1789dd27-2b5e-46e6-9260-affd4daf86cb" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.217474 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" event={"ID":"12785d9c-9cdb-4c80-bc4b-ee398e655992","Type":"ContainerStarted","Data":"7ee400af7b4a10061e715445bab53df46ab843187e17c8b3eda10dac35e188ff"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.226098 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" event={"ID":"ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb","Type":"ContainerStarted","Data":"612e559fa53d8b7f6328d7cb2582ddf48beddc99ed91e1d582520aa239f5b85f"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.227314 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" podUID="ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.243275 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" event={"ID":"01c6b4d6-55f6-4837-af54-9eb764262d03","Type":"ContainerStarted","Data":"006b6f9690eab6b4c37ba7c5455541ae97c5b1292a46f0749c56ed198428f5d0"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.243318 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" event={"ID":"01c6b4d6-55f6-4837-af54-9eb764262d03","Type":"ContainerStarted","Data":"1223d50e9e44c50ca50660bb931051b0b17d2b85decb6f320ec61b13736bf5ad"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.244969 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" podUID="01c6b4d6-55f6-4837-af54-9eb764262d03" Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.246378 4661 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" podUID="5826e57e-36e6-43e0-8141-7e6e3ae936a6" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.248853 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" event={"ID":"c0601966-5144-438f-a862-3f397e7064a4","Type":"ContainerStarted","Data":"3ef1c57a6986436f812c6b6ac74e308069bbead0c4375ffb3d3a426a21a6523c"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.251199 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" event={"ID":"2e8d1048-762f-4737-82c2-c6244072bf9d","Type":"ContainerStarted","Data":"30c2a96b8613fe444d912784a2b4f2395d822e7719684a3251e151c643fa4ddd"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.252236 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" podUID="c0601966-5144-438f-a862-3f397e7064a4" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.264019 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" event={"ID":"0e131827-21a2-4464-80d3-7528c1d8c52a","Type":"ContainerStarted","Data":"73fda725e0e450b180a80312968d5e420bcb4f287a3309d2ce09bd47ae9e1220"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.288028 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" podUID="838837a9-4076-41ba-91e4-44055ce7c97a" Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.288319 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" podUID="3ad43452-d673-42a7-8495-887b5e93cacb" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.291255 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" event={"ID":"001f8e02-2d35-4c68-88ae-4d732588213c","Type":"ContainerStarted","Data":"58a412ff2a7d6c8f26f9704f2e2e6c2555f63d5211e33283d66faae40352c54c"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.300017 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" event={"ID":"5826e57e-36e6-43e0-8141-7e6e3ae936a6","Type":"ContainerStarted","Data":"65dc6f784fb88a17f1a7fd238ddcd3ee6cc7d51c1e1cd3a70b0c0abc65c4b1f0"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.308762 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" podUID="5826e57e-36e6-43e0-8141-7e6e3ae936a6" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 
05:44:07.338481 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" event={"ID":"3ad43452-d673-42a7-8495-887b5e93cacb","Type":"ContainerStarted","Data":"016a77aa4a5afb6a8745147833558cb262d3aa4be2e7f327c0cc1f3e65b169d8"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.341258 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" podUID="3ad43452-d673-42a7-8495-887b5e93cacb" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.343284 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" event={"ID":"50f20957-2408-4a65-a326-e3b76051b38b","Type":"ContainerStarted","Data":"f58737d25e8e85e7af9996c33a48cf8737f4c3c49cdbe31b990007ea00e23528"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.358270 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" event={"ID":"a9e55ad4-d1a5-4830-96db-02c95384650c","Type":"ContainerStarted","Data":"1bd0f0272fce519f5046f73d68331692b9be4d77e8c160a94d430cb9dc166b0b"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.358319 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" event={"ID":"a9e55ad4-d1a5-4830-96db-02c95384650c","Type":"ContainerStarted","Data":"27d955f00833faf6cf12672f238d6c09cbf8676e34954a0f397d65c93e1d6132"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.365759 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" event={"ID":"5fbac0a5-84db-4228-9c2c-93d8d551044d","Type":"ContainerStarted","Data":"38c7f25f2f31301d47c36ba5613b0c4284830f7ca30aafde6af15c7c7413b422"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.375549 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" podUID="a9e55ad4-d1a5-4830-96db-02c95384650c" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.380474 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" event={"ID":"29b41883-13c4-454f-a3d3-45aa0db29f82","Type":"ContainerStarted","Data":"1c223cae4a5bcee803c18543e7c62fb2e509cce254ebf933a49ab9e151826859"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.390846 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" event={"ID":"1789dd27-2b5e-46e6-9260-affd4daf86cb","Type":"ContainerStarted","Data":"febe1ce90fc2a7d63b6e9060d21a198a2f401e67ccff98f174ee992ee6321862"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.390907 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" 
event={"ID":"1789dd27-2b5e-46e6-9260-affd4daf86cb","Type":"ContainerStarted","Data":"0d9241aa21e6b26b90e373a2d9e568993d08049429ca382562d252264ca83d9f"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.392487 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" podUID="1789dd27-2b5e-46e6-9260-affd4daf86cb" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.395602 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" event={"ID":"838837a9-4076-41ba-91e4-44055ce7c97a","Type":"ContainerStarted","Data":"7b9c4e56b281cb476ea92a6bae222ada3cf9c4bb5a8a9236ce0564cb9b3eabda"} Oct 01 05:44:07 crc kubenswrapper[4661]: E1001 05:44:07.396459 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" podUID="838837a9-4076-41ba-91e4-44055ce7c97a" Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.397106 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" event={"ID":"9d7506fd-1133-4927-872d-c68c525cba62","Type":"ContainerStarted","Data":"7895d477a6139beecff17ff4d1858cbd9b0113a3b53986228133009e25208e4e"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.398418 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" event={"ID":"68f9b331-6beb-4cda-884c-326180cb52c8","Type":"ContainerStarted","Data":"6aad809a66fa06edb6f6993a857fc3957101e5a83a6272e4d54b3b328d213b69"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.400887 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" event={"ID":"f020bcbc-c80b-4465-9733-204a86325234","Type":"ContainerStarted","Data":"48d7379419c622029424ce0918aa8982131283544f9a3a6b84ee178c34902bc0"} Oct 01 05:44:07 crc kubenswrapper[4661]: I1001 05:44:07.472285 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k"] Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.408045 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" event={"ID":"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb","Type":"ContainerStarted","Data":"b244940f4608cc792dea1d1dd0069dc1148473624abddacc7150ddd48c49258e"} Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.411313 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" event={"ID":"68f9b331-6beb-4cda-884c-326180cb52c8","Type":"ContainerStarted","Data":"ce48b07c76ec08b4c42b74d95eee9b1cce93edce6fdff8e63faa7ca68ada80e9"} Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.411369 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" event={"ID":"68f9b331-6beb-4cda-884c-326180cb52c8","Type":"ContainerStarted","Data":"c0f489d19f4680c5b0794ad3e169c6ec8694464717421c2854ac1660dbafeebd"} Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.412696 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.416131 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" event={"ID":"5826e57e-36e6-43e0-8141-7e6e3ae936a6","Type":"ContainerStarted","Data":"900ce1aaced5f0885482534a0fa5fe2d84270d5c4e3428d8c4a6b1347fbbd80b"} Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.419283 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" podUID="5826e57e-36e6-43e0-8141-7e6e3ae936a6" Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.419345 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" event={"ID":"3ad43452-d673-42a7-8495-887b5e93cacb","Type":"ContainerStarted","Data":"a60cc5b8f6cc690980f254b27f2942662ffd6b732d55f01a9fe52b7311d33284"} Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.420475 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" podUID="3ad43452-d673-42a7-8495-887b5e93cacb" Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.421683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" event={"ID":"c0601966-5144-438f-a862-3f397e7064a4","Type":"ContainerStarted","Data":"c7eccc30e871af4a466ae9b32d785a8b188ac703580a43b9f914f8c85c817912"} Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.423474 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" podUID="c0601966-5144-438f-a862-3f397e7064a4" Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.425683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" event={"ID":"838837a9-4076-41ba-91e4-44055ce7c97a","Type":"ContainerStarted","Data":"741075d282537255d49f7785422e5a8124228c9acae8fa0af9b0a5ccbe2d53b8"} Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.427188 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" podUID="838837a9-4076-41ba-91e4-44055ce7c97a" Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.427293 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" podUID="a9e55ad4-d1a5-4830-96db-02c95384650c" Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.427345 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" podUID="ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb" Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.427430 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" podUID="1789dd27-2b5e-46e6-9260-affd4daf86cb" Oct 01 05:44:08 crc kubenswrapper[4661]: E1001 05:44:08.427735 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" podUID="01c6b4d6-55f6-4837-af54-9eb764262d03" Oct 01 05:44:08 crc kubenswrapper[4661]: I1001 05:44:08.440488 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" podStartSLOduration=3.4404734550000002 podStartE2EDuration="3.440473455s" podCreationTimestamp="2025-10-01 05:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:44:08.436851696 +0000 UTC m=+897.374830310" watchObservedRunningTime="2025-10-01 05:44:08.440473455 +0000 UTC m=+897.378452069" Oct 01 05:44:09 crc kubenswrapper[4661]: E1001 05:44:09.434418 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" podUID="c0601966-5144-438f-a862-3f397e7064a4" Oct 01 05:44:09 crc kubenswrapper[4661]: E1001 05:44:09.434550 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" podUID="5826e57e-36e6-43e0-8141-7e6e3ae936a6" Oct 01 05:44:09 crc kubenswrapper[4661]: E1001 05:44:09.434571 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" podUID="838837a9-4076-41ba-91e4-44055ce7c97a" Oct 01 05:44:09 crc kubenswrapper[4661]: E1001 05:44:09.434763 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" podUID="3ad43452-d673-42a7-8495-887b5e93cacb" Oct 01 05:44:16 crc kubenswrapper[4661]: I1001 05:44:16.086655 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-8648d97544-tlsf9" Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.513791 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" event={"ID":"50f20957-2408-4a65-a326-e3b76051b38b","Type":"ContainerStarted","Data":"3ce9c450a0173cf09af01235fd378169bfb786dfd9478a802abf7ddf6b6eeb27"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.520832 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" event={"ID":"9d7506fd-1133-4927-872d-c68c525cba62","Type":"ContainerStarted","Data":"853a026b191001f37c2dd7e4ff1aa855134ff83e77b67b685d0c8eac0986ddbb"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.524094 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" event={"ID":"a23b3c29-b18c-4ea0-8723-41000d6a754b","Type":"ContainerStarted","Data":"69089887c8e5d85ba78d67b72ad58f35a370f820473ed1f261afbc8a78389733"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.525907 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" event={"ID":"34d73c0f-65b8-4718-8409-849ac307168f","Type":"ContainerStarted","Data":"0a91f72d46b9b46e29746804409e6d58872d7d86d8412701794db46f17064bbc"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.531963 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" event={"ID":"001f8e02-2d35-4c68-88ae-4d732588213c","Type":"ContainerStarted","Data":"7434851bb64b6a8eac3e22d29106fa40d08d4a6c29142302f146ef11ae9e1b77"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.552015 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" event={"ID":"29b41883-13c4-454f-a3d3-45aa0db29f82","Type":"ContainerStarted","Data":"31aea2ec17176a2ea1ef4fa2aabcc3f035836bfad62b0d2c79986c514c295f4d"} Oct 01 05:44:17 crc kubenswrapper[4661]: 
I1001 05:44:17.560802 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" event={"ID":"1e3e3612-5d8a-4db4-af00-94428fcb570e","Type":"ContainerStarted","Data":"a3603c328e0494e7252e0df0ec98b8e104d64b45e558c59e058bd2953a52ed2c"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.560847 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" event={"ID":"1e3e3612-5d8a-4db4-af00-94428fcb570e","Type":"ContainerStarted","Data":"781eb81c0dc2ab1df4a45d685b382a2bcdd2726429cc4ebf022065be1823658a"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.561143 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.562962 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" event={"ID":"2e8d1048-762f-4737-82c2-c6244072bf9d","Type":"ContainerStarted","Data":"1a05cf15c2fdeff62327e44f67d99cfe5b4220a009acd16465bc99f15b6fb2ce"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.564231 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" event={"ID":"0e131827-21a2-4464-80d3-7528c1d8c52a","Type":"ContainerStarted","Data":"4f0a7d9f9017421fe05c0671903224dbb1b0fe536ed60eb9502f3aef4c1bc746"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.582893 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" event={"ID":"12785d9c-9cdb-4c80-bc4b-ee398e655992","Type":"ContainerStarted","Data":"36ceef8da76cf67f40e27115ba6b7a30aa8cb96c88a085a64d663e8931eb7ef3"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.592468 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" event={"ID":"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb","Type":"ContainerStarted","Data":"4553bb09a3752cbe87ec4439dba2ae671107815f29a8e40584b48eebc27ea60f"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.621108 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" event={"ID":"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2","Type":"ContainerStarted","Data":"2312aa5698d7e12f38ff300b6f7d930d61106756da8928453fec1954d5bb4f60"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.655885 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" event={"ID":"5fbac0a5-84db-4228-9c2c-93d8d551044d","Type":"ContainerStarted","Data":"dbd1deb1db63a858065f63bf7e6d43a3aabebe56441484198f3c4d48c7810b56"} Oct 01 05:44:17 crc kubenswrapper[4661]: I1001 05:44:17.676840 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" event={"ID":"f020bcbc-c80b-4465-9733-204a86325234","Type":"ContainerStarted","Data":"9ab181d73581e1c184633206c3cd35ee4ee44780e524b1505b5459d90ef963a5"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.684597 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" 
event={"ID":"50f20957-2408-4a65-a326-e3b76051b38b","Type":"ContainerStarted","Data":"7a12c9f5463f3f401d739cf745ba51e648e14118d4be3054bbfa44e18418eace"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.684897 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.686124 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" event={"ID":"12785d9c-9cdb-4c80-bc4b-ee398e655992","Type":"ContainerStarted","Data":"27a71fe9de2ebe82ce283e0466db437d3ee989bcf846a164f2799fd236b71a09"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.686164 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.687516 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" event={"ID":"001f8e02-2d35-4c68-88ae-4d732588213c","Type":"ContainerStarted","Data":"35ae3a49a41d24b7af029dd2919801a35ec8f42c70985007db60d642f1b5cdbd"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.687638 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.689244 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" event={"ID":"5fbac0a5-84db-4228-9c2c-93d8d551044d","Type":"ContainerStarted","Data":"60fb60893863dcce5a891d22abfcc36304bd36c7b25d28007eae3bed4452421c"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.689362 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.690932 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" event={"ID":"f020bcbc-c80b-4465-9733-204a86325234","Type":"ContainerStarted","Data":"f98e2e59e3fa6a4d46e99ab4e0e27ca44cff7e8ba3278f061ce7dccfa3cc9b5f"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.691073 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.692672 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" event={"ID":"2e8d1048-762f-4737-82c2-c6244072bf9d","Type":"ContainerStarted","Data":"a11843219f13e8a5d58380e59c937626aa1637e3b9f2edb572bf625fa1887add"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.692772 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.694561 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" event={"ID":"0e131827-21a2-4464-80d3-7528c1d8c52a","Type":"ContainerStarted","Data":"8f1ccf28db4c7f7bf747d6f5940d623c09364c5adfa9c88182897bb455bb5665"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 
05:44:18.694686 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.696655 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" event={"ID":"34d73c0f-65b8-4718-8409-849ac307168f","Type":"ContainerStarted","Data":"e447af05f086d75373b7e4f5015bb768d5725e8570b7b092a7e6bf34a484f48e"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.696783 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.698003 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" event={"ID":"a23b3c29-b18c-4ea0-8723-41000d6a754b","Type":"ContainerStarted","Data":"a6de0d1e7c2c321632898832a8fb682fb90b9321bcc433b2051fdd3483c68547"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.698155 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.700212 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" podStartSLOduration=4.530592067 podStartE2EDuration="14.700199935s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.585498565 +0000 UTC m=+895.523477179" lastFinishedPulling="2025-10-01 05:44:16.755106393 +0000 UTC m=+905.693085047" observedRunningTime="2025-10-01 05:44:18.698578851 +0000 UTC m=+907.636557465" watchObservedRunningTime="2025-10-01 05:44:18.700199935 +0000 UTC m=+907.638178569" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.701725 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" event={"ID":"dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2","Type":"ContainerStarted","Data":"c21b2d394323c6c39a0436c8491ffc9fa3dd422f16a9dd6f3c33efb08d45f8e5"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.701846 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.702831 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" podStartSLOduration=4.106506813 podStartE2EDuration="14.702822847s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.162645515 +0000 UTC m=+895.100624129" lastFinishedPulling="2025-10-01 05:44:16.758961529 +0000 UTC m=+905.696940163" observedRunningTime="2025-10-01 05:44:17.588034519 +0000 UTC m=+906.526013133" watchObservedRunningTime="2025-10-01 05:44:18.702822847 +0000 UTC m=+907.640801471" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.703391 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" event={"ID":"29b41883-13c4-454f-a3d3-45aa0db29f82","Type":"ContainerStarted","Data":"5968d0211f78dbd3d01e420d93bdf954a49c375ea23f5d4a2064d6f038911e79"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 
05:44:18.703503 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.705114 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" event={"ID":"9d7506fd-1133-4927-872d-c68c525cba62","Type":"ContainerStarted","Data":"a5512eba6882bd8e07f8f31f232a8ac5734e62a5ec0673a60149f0fc307f1f07"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.705255 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.706814 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" event={"ID":"8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb","Type":"ContainerStarted","Data":"413dcf78af700e0c1357db6f24cda002139d7e3eaa64bb01bb99beab8a75076f"} Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.724175 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" podStartSLOduration=4.611912824 podStartE2EDuration="14.724154787s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.648125136 +0000 UTC m=+895.586103750" lastFinishedPulling="2025-10-01 05:44:16.760367089 +0000 UTC m=+905.698345713" observedRunningTime="2025-10-01 05:44:18.713533674 +0000 UTC m=+907.651512298" watchObservedRunningTime="2025-10-01 05:44:18.724154787 +0000 UTC m=+907.662133411" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.763807 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" podStartSLOduration=4.6536136280000004 podStartE2EDuration="14.763788563s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.640289349 +0000 UTC m=+895.578267963" lastFinishedPulling="2025-10-01 05:44:16.750464254 +0000 UTC m=+905.688442898" observedRunningTime="2025-10-01 05:44:18.736143189 +0000 UTC m=+907.674121803" watchObservedRunningTime="2025-10-01 05:44:18.763788563 +0000 UTC m=+907.701767187" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.788192 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" podStartSLOduration=4.683854524 podStartE2EDuration="14.788175437s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.648084205 +0000 UTC m=+895.586062819" lastFinishedPulling="2025-10-01 05:44:16.752405108 +0000 UTC m=+905.690383732" observedRunningTime="2025-10-01 05:44:18.763092614 +0000 UTC m=+907.701071238" watchObservedRunningTime="2025-10-01 05:44:18.788175437 +0000 UTC m=+907.726154051" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.789060 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" podStartSLOduration=4.664727754 podStartE2EDuration="14.789053691s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.632151064 +0000 UTC m=+895.570129678" lastFinishedPulling="2025-10-01 05:44:16.756477001 +0000 UTC m=+905.694455615" 
observedRunningTime="2025-10-01 05:44:18.779011403 +0000 UTC m=+907.716990027" watchObservedRunningTime="2025-10-01 05:44:18.789053691 +0000 UTC m=+907.727032305" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.798438 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" podStartSLOduration=4.675207755 podStartE2EDuration="14.79842093s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.632259408 +0000 UTC m=+895.570238022" lastFinishedPulling="2025-10-01 05:44:16.755472573 +0000 UTC m=+905.693451197" observedRunningTime="2025-10-01 05:44:18.794534883 +0000 UTC m=+907.732513517" watchObservedRunningTime="2025-10-01 05:44:18.79842093 +0000 UTC m=+907.736399554" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.810682 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" podStartSLOduration=4.69135042 podStartE2EDuration="14.810666719s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.637201524 +0000 UTC m=+895.575180138" lastFinishedPulling="2025-10-01 05:44:16.756517783 +0000 UTC m=+905.694496437" observedRunningTime="2025-10-01 05:44:18.807340357 +0000 UTC m=+907.745318981" watchObservedRunningTime="2025-10-01 05:44:18.810666719 +0000 UTC m=+907.748645333" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.832660 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" podStartSLOduration=4.212217476 podStartE2EDuration="14.832641707s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.140110672 +0000 UTC m=+895.078089286" lastFinishedPulling="2025-10-01 05:44:16.760534903 +0000 UTC m=+905.698513517" observedRunningTime="2025-10-01 05:44:18.82771197 +0000 UTC m=+907.765690584" watchObservedRunningTime="2025-10-01 05:44:18.832641707 +0000 UTC m=+907.770620321" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.848385 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" podStartSLOduration=3.934319572 podStartE2EDuration="14.848367371s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:05.836828808 +0000 UTC m=+894.774807422" lastFinishedPulling="2025-10-01 05:44:16.750876607 +0000 UTC m=+905.688855221" observedRunningTime="2025-10-01 05:44:18.843554869 +0000 UTC m=+907.781533483" watchObservedRunningTime="2025-10-01 05:44:18.848367371 +0000 UTC m=+907.786345995" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.859254 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" podStartSLOduration=4.749692895 podStartE2EDuration="14.859235462s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.649444003 +0000 UTC m=+895.587422617" lastFinishedPulling="2025-10-01 05:44:16.75898654 +0000 UTC m=+905.696965184" observedRunningTime="2025-10-01 05:44:18.856949488 +0000 UTC m=+907.794928112" watchObservedRunningTime="2025-10-01 05:44:18.859235462 +0000 UTC m=+907.797214096" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.876015 4661 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" podStartSLOduration=4.257869117 podStartE2EDuration="14.876002015s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.134183318 +0000 UTC m=+895.072161932" lastFinishedPulling="2025-10-01 05:44:16.752316176 +0000 UTC m=+905.690294830" observedRunningTime="2025-10-01 05:44:18.871792399 +0000 UTC m=+907.809771003" watchObservedRunningTime="2025-10-01 05:44:18.876002015 +0000 UTC m=+907.813980629" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.897771 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" podStartSLOduration=5.702912105 podStartE2EDuration="14.897755197s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:07.564841168 +0000 UTC m=+896.502819782" lastFinishedPulling="2025-10-01 05:44:16.75968425 +0000 UTC m=+905.697662874" observedRunningTime="2025-10-01 05:44:18.894811515 +0000 UTC m=+907.832790129" watchObservedRunningTime="2025-10-01 05:44:18.897755197 +0000 UTC m=+907.835733811" Oct 01 05:44:18 crc kubenswrapper[4661]: I1001 05:44:18.914649 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" podStartSLOduration=4.792598589 podStartE2EDuration="14.914620523s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.631871926 +0000 UTC m=+895.569850540" lastFinishedPulling="2025-10-01 05:44:16.75389382 +0000 UTC m=+905.691872474" observedRunningTime="2025-10-01 05:44:18.912063262 +0000 UTC m=+907.850041876" watchObservedRunningTime="2025-10-01 05:44:18.914620523 +0000 UTC m=+907.852599137" Oct 01 05:44:19 crc kubenswrapper[4661]: I1001 05:44:19.722407 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:22 crc kubenswrapper[4661]: I1001 05:44:22.743315 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" event={"ID":"ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb","Type":"ContainerStarted","Data":"5929b4f5c927523a16a6e7b192fff9a34adbeb4d4e5287dfababd3bfcf00838c"} Oct 01 05:44:22 crc kubenswrapper[4661]: I1001 05:44:22.745112 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" event={"ID":"a9e55ad4-d1a5-4830-96db-02c95384650c","Type":"ContainerStarted","Data":"4bdaa1368d4aaa67a852daf2d1d3be8773f181296c6b45ae65bd75f9cd86c448"} Oct 01 05:44:22 crc kubenswrapper[4661]: I1001 05:44:22.745334 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:22 crc kubenswrapper[4661]: I1001 05:44:22.761303 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-5n77c" podStartSLOduration=2.766009939 podStartE2EDuration="17.761286524s" podCreationTimestamp="2025-10-01 05:44:05 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.667281105 +0000 UTC m=+895.605259719" lastFinishedPulling="2025-10-01 05:44:21.66255769 +0000 UTC m=+910.600536304" observedRunningTime="2025-10-01 05:44:22.758335562 +0000 UTC 
m=+911.696314166" watchObservedRunningTime="2025-10-01 05:44:22.761286524 +0000 UTC m=+911.699265128" Oct 01 05:44:22 crc kubenswrapper[4661]: I1001 05:44:22.775807 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" podStartSLOduration=3.772141659 podStartE2EDuration="18.775790655s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.656817376 +0000 UTC m=+895.594795990" lastFinishedPulling="2025-10-01 05:44:21.660466372 +0000 UTC m=+910.598444986" observedRunningTime="2025-10-01 05:44:22.773995196 +0000 UTC m=+911.711973810" watchObservedRunningTime="2025-10-01 05:44:22.775790655 +0000 UTC m=+911.713769269" Oct 01 05:44:23 crc kubenswrapper[4661]: I1001 05:44:23.754371 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" event={"ID":"01c6b4d6-55f6-4837-af54-9eb764262d03","Type":"ContainerStarted","Data":"338e87d10dc1744f9a25850a6d896b1b72743728107a90a6223d0b5e2cd02b5b"} Oct 01 05:44:23 crc kubenswrapper[4661]: I1001 05:44:23.755484 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:23 crc kubenswrapper[4661]: I1001 05:44:23.783204 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" podStartSLOduration=3.716415799 podStartE2EDuration="19.783177864s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.649736801 +0000 UTC m=+895.587715415" lastFinishedPulling="2025-10-01 05:44:22.716498836 +0000 UTC m=+911.654477480" observedRunningTime="2025-10-01 05:44:23.77686569 +0000 UTC m=+912.714844344" watchObservedRunningTime="2025-10-01 05:44:23.783177864 +0000 UTC m=+912.721156518" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.731613 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-qslns" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.747764 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-p6lbb" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.765757 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-jszh9" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.775436 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-fmrmk" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.777286 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" event={"ID":"5826e57e-36e6-43e0-8141-7e6e3ae936a6","Type":"ContainerStarted","Data":"e84b8fe8228b373f9e1cf2be9f3a1ea2d414f907d6d973b4dd96fc764d5ce9cc"} Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.777490 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.781811 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" event={"ID":"3ad43452-d673-42a7-8495-887b5e93cacb","Type":"ContainerStarted","Data":"34989ee8f02a42fe8c9ad5292d2b4bb0e049f7033eed480615b3ed509a16ef3e"} Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.782088 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.788296 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" event={"ID":"c0601966-5144-438f-a862-3f397e7064a4","Type":"ContainerStarted","Data":"4fd7e210b5b4302b86db75842f7e57a57d8f2199b87e77ccbbb3c8dc30838f80"} Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.788540 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.826002 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" podStartSLOduration=3.060355853 podStartE2EDuration="20.825988092s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.686091816 +0000 UTC m=+895.624070430" lastFinishedPulling="2025-10-01 05:44:24.451724015 +0000 UTC m=+913.389702669" observedRunningTime="2025-10-01 05:44:24.824301385 +0000 UTC m=+913.762279999" watchObservedRunningTime="2025-10-01 05:44:24.825988092 +0000 UTC m=+913.763966706" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.849248 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-7w27g" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.855540 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" podStartSLOduration=3.066178103 podStartE2EDuration="20.855521939s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.666607057 +0000 UTC m=+895.604585671" lastFinishedPulling="2025-10-01 05:44:24.455950853 +0000 UTC m=+913.393929507" observedRunningTime="2025-10-01 05:44:24.839320311 +0000 UTC m=+913.777298925" watchObservedRunningTime="2025-10-01 05:44:24.855521939 +0000 UTC m=+913.793500553" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.857650 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" podStartSLOduration=3.048153984 podStartE2EDuration="20.857620457s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.653815863 +0000 UTC m=+895.591794467" lastFinishedPulling="2025-10-01 05:44:24.463282316 +0000 UTC m=+913.401260940" observedRunningTime="2025-10-01 05:44:24.855167969 +0000 UTC m=+913.793146583" watchObservedRunningTime="2025-10-01 05:44:24.857620457 +0000 UTC m=+913.795599071" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.873601 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-7fjjd" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.933258 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-nwsxg" Oct 01 05:44:24 crc kubenswrapper[4661]: I1001 05:44:24.948646 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-dxp6r" Oct 01 05:44:25 crc kubenswrapper[4661]: I1001 05:44:25.010268 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-68scz" Oct 01 05:44:25 crc kubenswrapper[4661]: I1001 05:44:25.069699 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8kwhx" Oct 01 05:44:25 crc kubenswrapper[4661]: I1001 05:44:25.132729 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8fsv8" Oct 01 05:44:25 crc kubenswrapper[4661]: I1001 05:44:25.176296 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-f248r" Oct 01 05:44:25 crc kubenswrapper[4661]: I1001 05:44:25.460507 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-9msgq" Oct 01 05:44:26 crc kubenswrapper[4661]: I1001 05:44:26.699794 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k" Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.874421 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" event={"ID":"1789dd27-2b5e-46e6-9260-affd4daf86cb","Type":"ContainerStarted","Data":"0a5407ac6c42cffba1c557b301dbeebcf1dfa7be2624d9c4241f57cf1b04d269"} Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.877011 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.877758 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" event={"ID":"838837a9-4076-41ba-91e4-44055ce7c97a","Type":"ContainerStarted","Data":"f054035b123b9c83472aa22616b6eb9506c2452ade2fd68aab7e2f38389760f1"} Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.878203 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.909845 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" podStartSLOduration=3.7668938450000002 podStartE2EDuration="29.909811695s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.650187443 +0000 UTC m=+895.588166057" lastFinishedPulling="2025-10-01 05:44:32.793105293 +0000 UTC m=+921.731083907" observedRunningTime="2025-10-01 05:44:33.899699855 +0000 UTC m=+922.837678499" watchObservedRunningTime="2025-10-01 05:44:33.909811695 +0000 UTC m=+922.847790349" Oct 01 05:44:33 crc kubenswrapper[4661]: I1001 05:44:33.937884 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" podStartSLOduration=3.837502487 podStartE2EDuration="29.937859481s" podCreationTimestamp="2025-10-01 05:44:04 +0000 UTC" firstStartedPulling="2025-10-01 05:44:06.686350733 +0000 UTC m=+895.624329347" lastFinishedPulling="2025-10-01 05:44:32.786707727 +0000 UTC m=+921.724686341" observedRunningTime="2025-10-01 05:44:33.928698966 +0000 UTC m=+922.866677620" watchObservedRunningTime="2025-10-01 05:44:33.937859481 +0000 UTC m=+922.875838125" Oct 01 05:44:34 crc kubenswrapper[4661]: I1001 05:44:34.309030 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:44:34 crc kubenswrapper[4661]: I1001 05:44:34.309116 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:44:35 crc kubenswrapper[4661]: I1001 05:44:35.197381 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-6l9mf" Oct 01 05:44:35 crc kubenswrapper[4661]: I1001 05:44:35.213589 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-lhhfj" Oct 01 05:44:35 crc kubenswrapper[4661]: I1001 05:44:35.226369 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-xs8zb" Oct 01 05:44:35 crc kubenswrapper[4661]: I1001 05:44:35.246762 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-cv29g" Oct 01 05:44:35 crc kubenswrapper[4661]: I1001 05:44:35.332726 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-cd2rk" Oct 01 05:44:45 crc kubenswrapper[4661]: I1001 05:44:45.360657 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-jsj4d" Oct 01 05:44:45 crc kubenswrapper[4661]: I1001 05:44:45.399157 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-2hjs8" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.166728 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v"] Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.168799 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.171895 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.181844 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.191527 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v"] Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.318997 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.319096 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfxvr\" (UniqueName: \"kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.319387 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.420672 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.420763 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.420857 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfxvr\" (UniqueName: \"kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.422154 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume\") pod 
\"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.431333 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.451797 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfxvr\" (UniqueName: \"kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr\") pod \"collect-profiles-29321625-jtv2v\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.491417 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:00 crc kubenswrapper[4661]: I1001 05:45:00.789302 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v"] Oct 01 05:45:01 crc kubenswrapper[4661]: I1001 05:45:01.143095 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" event={"ID":"8716dca9-1210-480b-b460-e41071589e9d","Type":"ContainerStarted","Data":"68cf0ad20777a84bfa7e08c8f0e2e1e30ed1125df8e0fab7e1d96f14773a18e5"} Oct 01 05:45:01 crc kubenswrapper[4661]: I1001 05:45:01.143384 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" event={"ID":"8716dca9-1210-480b-b460-e41071589e9d","Type":"ContainerStarted","Data":"278d47ac492f9db2f0a244290bcc7997e7f3ab4ec1242ac9c1d2a11c66c89685"} Oct 01 05:45:01 crc kubenswrapper[4661]: I1001 05:45:01.159073 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" podStartSLOduration=1.15905718 podStartE2EDuration="1.15905718s" podCreationTimestamp="2025-10-01 05:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:45:01.155533142 +0000 UTC m=+950.093511806" watchObservedRunningTime="2025-10-01 05:45:01.15905718 +0000 UTC m=+950.097035804" Oct 01 05:45:02 crc kubenswrapper[4661]: I1001 05:45:02.157194 4661 generic.go:334] "Generic (PLEG): container finished" podID="8716dca9-1210-480b-b460-e41071589e9d" containerID="68cf0ad20777a84bfa7e08c8f0e2e1e30ed1125df8e0fab7e1d96f14773a18e5" exitCode=0 Oct 01 05:45:02 crc kubenswrapper[4661]: I1001 05:45:02.157353 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" event={"ID":"8716dca9-1210-480b-b460-e41071589e9d","Type":"ContainerDied","Data":"68cf0ad20777a84bfa7e08c8f0e2e1e30ed1125df8e0fab7e1d96f14773a18e5"} Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.466467 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.575259 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume\") pod \"8716dca9-1210-480b-b460-e41071589e9d\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.575407 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume\") pod \"8716dca9-1210-480b-b460-e41071589e9d\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.575488 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfxvr\" (UniqueName: \"kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr\") pod \"8716dca9-1210-480b-b460-e41071589e9d\" (UID: \"8716dca9-1210-480b-b460-e41071589e9d\") " Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.576066 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume" (OuterVolumeSpecName: "config-volume") pod "8716dca9-1210-480b-b460-e41071589e9d" (UID: "8716dca9-1210-480b-b460-e41071589e9d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.580577 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr" (OuterVolumeSpecName: "kube-api-access-vfxvr") pod "8716dca9-1210-480b-b460-e41071589e9d" (UID: "8716dca9-1210-480b-b460-e41071589e9d"). InnerVolumeSpecName "kube-api-access-vfxvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.581321 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8716dca9-1210-480b-b460-e41071589e9d" (UID: "8716dca9-1210-480b-b460-e41071589e9d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.677892 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8716dca9-1210-480b-b460-e41071589e9d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.677944 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8716dca9-1210-480b-b460-e41071589e9d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:03 crc kubenswrapper[4661]: I1001 05:45:03.677963 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfxvr\" (UniqueName: \"kubernetes.io/projected/8716dca9-1210-480b-b460-e41071589e9d-kube-api-access-vfxvr\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.179820 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" event={"ID":"8716dca9-1210-480b-b460-e41071589e9d","Type":"ContainerDied","Data":"278d47ac492f9db2f0a244290bcc7997e7f3ab4ec1242ac9c1d2a11c66c89685"} Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.179864 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="278d47ac492f9db2f0a244290bcc7997e7f3ab4ec1242ac9c1d2a11c66c89685" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.179915 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.309811 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.309867 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.309908 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.310299 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:45:04 crc kubenswrapper[4661]: I1001 05:45:04.310353 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e" gracePeriod=600 Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.191622 4661 generic.go:334] "Generic (PLEG): container finished" 
podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e" exitCode=0 Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.191723 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e"} Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.193276 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f"} Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.193411 4661 scope.go:117] "RemoveContainer" containerID="4e198ab64b6d3a437e0b5ab538bb0a82963de29658cbc65ddbf1c080d70a0ec7" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.939746 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:05 crc kubenswrapper[4661]: E1001 05:45:05.940544 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8716dca9-1210-480b-b460-e41071589e9d" containerName="collect-profiles" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.940577 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="8716dca9-1210-480b-b460-e41071589e9d" containerName="collect-profiles" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.940883 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="8716dca9-1210-480b-b460-e41071589e9d" containerName="collect-profiles" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.942149 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.946232 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cqjws" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.946729 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.947165 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.947173 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 01 05:45:05 crc kubenswrapper[4661]: I1001 05:45:05.951209 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.002564 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.004169 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.007017 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.015114 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.114933 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp476\" (UniqueName: \"kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.115228 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.115270 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.115288 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msvx8\" (UniqueName: \"kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.115304 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.215957 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp476\" (UniqueName: \"kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.216002 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.216034 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " 
pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.216051 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msvx8\" (UniqueName: \"kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.216067 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.216929 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.217006 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.217208 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.236438 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp476\" (UniqueName: \"kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476\") pod \"dnsmasq-dns-8468885bfc-2t2gk\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.242303 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msvx8\" (UniqueName: \"kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8\") pod \"dnsmasq-dns-545d49fd5c-7c6nb\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.268366 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.322386 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.594853 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:06 crc kubenswrapper[4661]: W1001 05:45:06.596873 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9e40475_a6e7_40c4_9053_738327ed551b.slice/crio-ec11a236e69d03b0ee8f145e8defb7663c5596f367a6130a82389cee75de8eb5 WatchSource:0}: Error finding container ec11a236e69d03b0ee8f145e8defb7663c5596f367a6130a82389cee75de8eb5: Status 404 returned error can't find the container with id ec11a236e69d03b0ee8f145e8defb7663c5596f367a6130a82389cee75de8eb5 Oct 01 05:45:06 crc kubenswrapper[4661]: I1001 05:45:06.726183 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:07 crc kubenswrapper[4661]: I1001 05:45:07.207908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" event={"ID":"f9e40475-a6e7-40c4-9053-738327ed551b","Type":"ContainerStarted","Data":"ec11a236e69d03b0ee8f145e8defb7663c5596f367a6130a82389cee75de8eb5"} Oct 01 05:45:07 crc kubenswrapper[4661]: I1001 05:45:07.209855 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" event={"ID":"92bebd22-d17b-4231-a472-46db2c691d3d","Type":"ContainerStarted","Data":"31c262edfe883b36d3879e1810ae63e72ee896d0477271614de43f04c8e2b15c"} Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.428644 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.457830 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.459611 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.471576 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.472576 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.472624 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.472718 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5cwm\" (UniqueName: \"kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.575375 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.575437 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.575499 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5cwm\" (UniqueName: \"kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.576668 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.582142 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.595343 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5cwm\" (UniqueName: 
\"kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm\") pod \"dnsmasq-dns-9bd5d9d8c-57j5t\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.707821 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.734699 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.736281 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.748545 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.781236 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phf7l\" (UniqueName: \"kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.781294 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.781321 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.781826 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.882242 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phf7l\" (UniqueName: \"kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.882302 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.882330 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.883253 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.883701 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:09 crc kubenswrapper[4661]: I1001 05:45:09.909897 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phf7l\" (UniqueName: \"kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l\") pod \"dnsmasq-dns-86b8f4ff9-wcnwg\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.102517 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.146031 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.187158 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.194602 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.211345 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.296784 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:10 crc kubenswrapper[4661]: W1001 05:45:10.310746 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8aef2be_9249_49a0_8f9f_71870656543c.slice/crio-540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774 WatchSource:0}: Error finding container 540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774: Status 404 returned error can't find the container with id 540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774 Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.392249 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brvsm\" (UniqueName: \"kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.392299 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.392389 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.494173 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.494267 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brvsm\" (UniqueName: \"kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.494296 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.495164 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config\") pod 
\"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.495347 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.516164 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brvsm\" (UniqueName: \"kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm\") pod \"dnsmasq-dns-5449989c59-nh67m\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.524591 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.603568 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.618262 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.618375 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622324 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-tsxss" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622345 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622449 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622520 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622604 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.622682 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.625419 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705115 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705184 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705225 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705279 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705299 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705342 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705358 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xmk5\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705398 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705415 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705444 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.705470 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.748191 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806578 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806618 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xmk5\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806662 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806677 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806733 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806786 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806807 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.806822 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.808415 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.809294 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.812315 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.813535 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.814479 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.814605 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.814642 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.815514 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.818968 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.821806 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.822099 4661 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.823151 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.827119 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xmk5\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.833541 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.853325 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.883663 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.888347 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.890498 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-x9srl" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.890549 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.890737 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.890816 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.891056 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.891294 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.893828 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.935253 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:45:10 crc kubenswrapper[4661]: I1001 05:45:10.944472 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.017493 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.026894 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfxbs\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.026944 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.026964 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.026980 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027023 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027066 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027090 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027110 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027126 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027152 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.027170 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: W1001 05:45:11.033942 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod677c3be8_2587_44d1_8545_65238de20248.slice/crio-9cf828550b84c005e0e571a7cb0e572ca4d4bc26597d9d9e9b6159e2d8a9ed12 WatchSource:0}: Error finding container 9cf828550b84c005e0e571a7cb0e572ca4d4bc26597d9d9e9b6159e2d8a9ed12: Status 404 returned error can't find the container with id 9cf828550b84c005e0e571a7cb0e572ca4d4bc26597d9d9e9b6159e2d8a9ed12 Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128707 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128773 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128799 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128823 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128841 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128870 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128887 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128903 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfxbs\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128927 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128950 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.128966 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.130026 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.132111 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.132153 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.132169 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 
05:45:11.132295 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.132941 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.136475 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.142648 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.147103 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.149754 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.166171 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfxbs\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.172709 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.214624 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.216927 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.222909 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.223095 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.223213 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.223316 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-f9bjx" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.223747 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.224049 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.224232 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.228437 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.248841 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.295263 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" event={"ID":"855ad1a6-cb42-4fdd-9c30-ccc290365265","Type":"ContainerStarted","Data":"add7a85a4c0fa3102462b28c999432e6320fac1ee6b9098a2e7f53ff3e4c8ba7"} Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.299705 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-nh67m" event={"ID":"677c3be8-2587-44d1-8545-65238de20248","Type":"ContainerStarted","Data":"9cf828550b84c005e0e571a7cb0e572ca4d4bc26597d9d9e9b6159e2d8a9ed12"} Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.301853 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" event={"ID":"e8aef2be-9249-49a0-8f9f-71870656543c","Type":"ContainerStarted","Data":"540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774"} Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335650 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335695 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335716 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335739 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1658ccd7-4bae-45bf-aa67-fc5c075a417c-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335824 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335861 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1658ccd7-4bae-45bf-aa67-fc5c075a417c-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335888 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335905 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335919 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335935 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2rjr\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-kube-api-access-f2rjr\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.335970 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: 
\"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.440202 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.440520 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.440543 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1658ccd7-4bae-45bf-aa67-fc5c075a417c-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.440928 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.440977 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1658ccd7-4bae-45bf-aa67-fc5c075a417c-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441007 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441028 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441046 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441064 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2rjr\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-kube-api-access-f2rjr\") pod \"rabbitmq-notifications-server-0\" (UID: 
\"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441081 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441106 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441124 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.441289 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.442105 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.443061 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.443316 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.445563 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1658ccd7-4bae-45bf-aa67-fc5c075a417c-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.448761 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1658ccd7-4bae-45bf-aa67-fc5c075a417c-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: 
\"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.453437 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1658ccd7-4bae-45bf-aa67-fc5c075a417c-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.453981 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.454188 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.458702 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2rjr\" (UniqueName: \"kubernetes.io/projected/1658ccd7-4bae-45bf-aa67-fc5c075a417c-kube-api-access-f2rjr\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.462205 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:45:11 crc kubenswrapper[4661]: W1001 05:45:11.466022 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31336b4a_1953_44ab_b229_401a3a3ac031.slice/crio-6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542 WatchSource:0}: Error finding container 6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542: Status 404 returned error can't find the container with id 6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542 Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.471712 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"1658ccd7-4bae-45bf-aa67-fc5c075a417c\") " pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.553643 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.700529 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:45:11 crc kubenswrapper[4661]: W1001 05:45:11.728105 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b2acad5_a746_42a5_b9e8_a9904ad242bc.slice/crio-a2ccef982f76a0faf7b5093c5021207aab6c5251d62c92673096569f62dcaadf WatchSource:0}: Error finding container a2ccef982f76a0faf7b5093c5021207aab6c5251d62c92673096569f62dcaadf: Status 404 returned error can't find the container with id a2ccef982f76a0faf7b5093c5021207aab6c5251d62c92673096569f62dcaadf Oct 01 05:45:11 crc kubenswrapper[4661]: I1001 05:45:11.972335 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Oct 01 05:45:11 crc kubenswrapper[4661]: W1001 05:45:11.986375 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1658ccd7_4bae_45bf_aa67_fc5c075a417c.slice/crio-62affe935f8224f4d3c8414e30beaf9d5d1fe9062b134a1b951bd8a99623c12d WatchSource:0}: Error finding container 62affe935f8224f4d3c8414e30beaf9d5d1fe9062b134a1b951bd8a99623c12d: Status 404 returned error can't find the container with id 62affe935f8224f4d3c8414e30beaf9d5d1fe9062b134a1b951bd8a99623c12d Oct 01 05:45:12 crc kubenswrapper[4661]: I1001 05:45:12.312225 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerStarted","Data":"6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542"} Oct 01 05:45:12 crc kubenswrapper[4661]: I1001 05:45:12.313207 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerStarted","Data":"a2ccef982f76a0faf7b5093c5021207aab6c5251d62c92673096569f62dcaadf"} Oct 01 05:45:12 crc kubenswrapper[4661]: I1001 05:45:12.314407 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"1658ccd7-4bae-45bf-aa67-fc5c075a417c","Type":"ContainerStarted","Data":"62affe935f8224f4d3c8414e30beaf9d5d1fe9062b134a1b951bd8a99623c12d"} Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.862366 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.865130 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.867987 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.869968 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.871116 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.871263 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.871391 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-zrfp9" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.871566 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 01 05:45:13 crc kubenswrapper[4661]: I1001 05:45:13.877407 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.005928 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.005979 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/daca2202-a971-4201-81be-edef6f0c40f6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006002 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006043 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006079 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-secrets\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006096 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22bsw\" (UniqueName: \"kubernetes.io/projected/daca2202-a971-4201-81be-edef6f0c40f6-kube-api-access-22bsw\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " 
pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006111 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-kolla-config\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006156 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.006197 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-config-data-default\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107069 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-secrets\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107133 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22bsw\" (UniqueName: \"kubernetes.io/projected/daca2202-a971-4201-81be-edef6f0c40f6-kube-api-access-22bsw\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107149 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-kolla-config\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107171 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107205 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-config-data-default\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107255 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107273 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/daca2202-a971-4201-81be-edef6f0c40f6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.107292 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.108220 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-kolla-config\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.108258 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.108472 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-config-data-default\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.108729 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/daca2202-a971-4201-81be-edef6f0c40f6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.109108 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daca2202-a971-4201-81be-edef6f0c40f6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.114322 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.131088 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-secrets\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: 
I1001 05:45:14.132141 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daca2202-a971-4201-81be-edef6f0c40f6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.134377 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22bsw\" (UniqueName: \"kubernetes.io/projected/daca2202-a971-4201-81be-edef6f0c40f6-kube-api-access-22bsw\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.158396 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"daca2202-a971-4201-81be-edef6f0c40f6\") " pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.193129 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.274391 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.276439 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.279282 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.279699 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.279882 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-85gwb" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.280156 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.303846 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411293 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411326 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " 
pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411353 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411377 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411410 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411433 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6psn\" (UniqueName: \"kubernetes.io/projected/e892c883-6f23-415f-9e9d-bde45fefe01e-kube-api-access-p6psn\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411471 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.411491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.514964 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515069 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515121 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515194 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515281 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6psn\" (UniqueName: \"kubernetes.io/projected/e892c883-6f23-415f-9e9d-bde45fefe01e-kube-api-access-p6psn\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515358 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515393 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515455 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.515482 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.517080 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.520590 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.520682 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 
05:45:14.521499 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.523348 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.524548 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.526818 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e892c883-6f23-415f-9e9d-bde45fefe01e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.535342 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e892c883-6f23-415f-9e9d-bde45fefe01e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.539146 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6psn\" (UniqueName: \"kubernetes.io/projected/e892c883-6f23-415f-9e9d-bde45fefe01e-kube-api-access-p6psn\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.560713 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e892c883-6f23-415f-9e9d-bde45fefe01e\") " pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.606479 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.646169 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.806317 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.807262 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.822315 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.822493 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.822598 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-2jrtf" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.825321 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.922530 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.922574 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-config-data\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.922603 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.922621 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-kolla-config\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:14 crc kubenswrapper[4661]: I1001 05:45:14.922679 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djgv6\" (UniqueName: \"kubernetes.io/projected/2741b07c-1750-4920-a734-2f51af08ac8b-kube-api-access-djgv6\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.024751 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djgv6\" (UniqueName: \"kubernetes.io/projected/2741b07c-1750-4920-a734-2f51af08ac8b-kube-api-access-djgv6\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.024891 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.024914 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-config-data\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.024989 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.025007 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-kolla-config\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.026556 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-config-data\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.027549 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2741b07c-1750-4920-a734-2f51af08ac8b-kolla-config\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.029824 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.030031 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2741b07c-1750-4920-a734-2f51af08ac8b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.046167 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djgv6\" (UniqueName: \"kubernetes.io/projected/2741b07c-1750-4920-a734-2f51af08ac8b-kube-api-access-djgv6\") pod \"memcached-0\" (UID: \"2741b07c-1750-4920-a734-2f51af08ac8b\") " pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.153420 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.155288 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 01 05:45:15 crc kubenswrapper[4661]: W1001 05:45:15.170588 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode892c883_6f23_415f_9e9d_bde45fefe01e.slice/crio-b8f1586f7fdc7bb9e3629c4634ac371eb42cd5afe37bc725fa39fbe792ddf556 WatchSource:0}: Error finding container b8f1586f7fdc7bb9e3629c4634ac371eb42cd5afe37bc725fa39fbe792ddf556: Status 404 returned error can't find the container with id b8f1586f7fdc7bb9e3629c4634ac371eb42cd5afe37bc725fa39fbe792ddf556 Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.378755 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e892c883-6f23-415f-9e9d-bde45fefe01e","Type":"ContainerStarted","Data":"b8f1586f7fdc7bb9e3629c4634ac371eb42cd5afe37bc725fa39fbe792ddf556"} Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.380137 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"daca2202-a971-4201-81be-edef6f0c40f6","Type":"ContainerStarted","Data":"39339c97a76fcd383e92a6a5fd624305fe078a9338118d5235694a6c0890756a"} Oct 01 05:45:15 crc kubenswrapper[4661]: W1001 05:45:15.609381 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2741b07c_1750_4920_a734_2f51af08ac8b.slice/crio-6161fac53f57809f6ed61f9e710248b19f19552e22c23790bfcfffd295a851b4 WatchSource:0}: Error finding container 6161fac53f57809f6ed61f9e710248b19f19552e22c23790bfcfffd295a851b4: Status 404 returned error can't find the container with id 6161fac53f57809f6ed61f9e710248b19f19552e22c23790bfcfffd295a851b4 Oct 01 05:45:15 crc kubenswrapper[4661]: I1001 05:45:15.620619 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.392450 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"2741b07c-1750-4920-a734-2f51af08ac8b","Type":"ContainerStarted","Data":"6161fac53f57809f6ed61f9e710248b19f19552e22c23790bfcfffd295a851b4"} Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.600311 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.603201 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.611102 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.611666 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-vn45w" Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.756330 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2gzz\" (UniqueName: \"kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz\") pod \"kube-state-metrics-0\" (UID: \"0e50e984-837a-48c4-ac76-c62066f13512\") " pod="openstack/kube-state-metrics-0" Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.862436 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2gzz\" (UniqueName: \"kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz\") pod \"kube-state-metrics-0\" (UID: \"0e50e984-837a-48c4-ac76-c62066f13512\") " pod="openstack/kube-state-metrics-0" Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.884622 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2gzz\" (UniqueName: \"kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz\") pod \"kube-state-metrics-0\" (UID: \"0e50e984-837a-48c4-ac76-c62066f13512\") " pod="openstack/kube-state-metrics-0" Oct 01 05:45:16 crc kubenswrapper[4661]: I1001 05:45:16.977993 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.656587 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.951549 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.954419 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.955972 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.958965 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.959200 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.959642 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.959891 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rxwq4" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.959936 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Oct 01 05:45:17 crc kubenswrapper[4661]: I1001 05:45:17.965560 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114528 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114574 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114713 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114737 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114764 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114795 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wltdz\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114822 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.114840 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217621 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217677 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217706 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217736 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wltdz\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217761 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217777 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 
05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217796 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.217810 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.221821 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.232999 4661 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.233070 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e51bf99238560523215dee685b077c67fdd0498f27e19b8c5ba6a080034e1ca7/globalmount\"" pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.240145 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.242686 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wltdz\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.245875 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.255768 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " 
pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.256096 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.258535 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.280691 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.463007 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0e50e984-837a-48c4-ac76-c62066f13512","Type":"ContainerStarted","Data":"ab5c7be3b9dddb38e1c079da2093817ec5a620fc34a366bcd421c096d913dde6"} Oct 01 05:45:18 crc kubenswrapper[4661]: I1001 05:45:18.588655 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.792999 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mplg4"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.795646 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.800066 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-mzvwd" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.800486 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.801072 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.804340 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mplg4"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.808492 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-wmxcl"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.810279 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.812603 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wmxcl"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.864756 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.867443 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.871349 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.871659 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.871770 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.871890 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8dfn5" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.872603 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.872853 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877737 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5001332-068e-46eb-a21c-25e29832baab-scripts\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877818 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm9zp\" (UniqueName: \"kubernetes.io/projected/c5001332-068e-46eb-a21c-25e29832baab-kube-api-access-rm9zp\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877839 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-ovn-controller-tls-certs\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877897 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-combined-ca-bundle\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877915 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-log-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877943 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.877961 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.881910 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979662 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-log\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979699 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-etc-ovs\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979755 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92mn9\" (UniqueName: \"kubernetes.io/projected/dff60953-8a38-41cb-bc21-6192798508a1-kube-api-access-92mn9\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979806 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979827 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-config\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979852 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-combined-ca-bundle\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979871 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-log-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979889 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979908 4661 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979951 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5001332-068e-46eb-a21c-25e29832baab-scripts\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979979 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9jgb\" (UniqueName: \"kubernetes.io/projected/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-kube-api-access-z9jgb\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.979995 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dff60953-8a38-41cb-bc21-6192798508a1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980014 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980037 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-scripts\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980055 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980071 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980086 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-run\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980126 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980143 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm9zp\" (UniqueName: \"kubernetes.io/projected/c5001332-068e-46eb-a21c-25e29832baab-kube-api-access-rm9zp\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980168 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-lib\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.980185 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-ovn-controller-tls-certs\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.985126 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-log-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.985273 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run-ovn\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.985346 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c5001332-068e-46eb-a21c-25e29832baab-var-run\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.986757 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-ovn-controller-tls-certs\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.987505 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5001332-068e-46eb-a21c-25e29832baab-scripts\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:20 crc kubenswrapper[4661]: I1001 05:45:20.989820 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5001332-068e-46eb-a21c-25e29832baab-combined-ca-bundle\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:21 crc 
kubenswrapper[4661]: I1001 05:45:21.021235 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm9zp\" (UniqueName: \"kubernetes.io/projected/c5001332-068e-46eb-a21c-25e29832baab-kube-api-access-rm9zp\") pod \"ovn-controller-mplg4\" (UID: \"c5001332-068e-46eb-a21c-25e29832baab\") " pod="openstack/ovn-controller-mplg4" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084512 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-scripts\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084551 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084574 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084593 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-run\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084625 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084653 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-lib\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084689 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-log\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084708 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-etc-ovs\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084724 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92mn9\" (UniqueName: 
\"kubernetes.io/projected/dff60953-8a38-41cb-bc21-6192798508a1-kube-api-access-92mn9\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084742 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-config\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084815 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9jgb\" (UniqueName: \"kubernetes.io/projected/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-kube-api-access-z9jgb\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084832 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dff60953-8a38-41cb-bc21-6192798508a1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.084853 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.085151 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.087717 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-log\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.090162 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-scripts\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.093303 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101156 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-lib\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101303 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-etc-ovs\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101324 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101353 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff60953-8a38-41cb-bc21-6192798508a1-config\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101395 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-var-run\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.101830 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.102621 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dff60953-8a38-41cb-bc21-6192798508a1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.124266 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-mplg4" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.139350 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9jgb\" (UniqueName: \"kubernetes.io/projected/34fe2d2b-33b8-4736-98c8-3b7ae70118dd-kube-api-access-z9jgb\") pod \"ovn-controller-ovs-wmxcl\" (UID: \"34fe2d2b-33b8-4736-98c8-3b7ae70118dd\") " pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.145658 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92mn9\" (UniqueName: \"kubernetes.io/projected/dff60953-8a38-41cb-bc21-6192798508a1-kube-api-access-92mn9\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.158817 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.159476 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff60953-8a38-41cb-bc21-6192798508a1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dff60953-8a38-41cb-bc21-6192798508a1\") " pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.245219 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 05:45:21 crc kubenswrapper[4661]: I1001 05:45:21.437140 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.144978 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.151306 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.151762 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.153672 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.154173 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.154296 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.156390 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-sv7zv" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251465 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flnk2\" (UniqueName: \"kubernetes.io/projected/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-kube-api-access-flnk2\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251522 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251579 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251715 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251782 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251830 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251847 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-config\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " 
pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.251907 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358474 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358555 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358602 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358642 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358678 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358696 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-config\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358717 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358754 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flnk2\" (UniqueName: \"kubernetes.io/projected/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-kube-api-access-flnk2\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.358967 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.359033 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.360285 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-config\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.361444 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.365129 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.365578 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.374621 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.376342 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flnk2\" (UniqueName: \"kubernetes.io/projected/6152c766-cf88-4b8c-9c8a-372dcdd4e62b-kube-api-access-flnk2\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.390060 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6152c766-cf88-4b8c-9c8a-372dcdd4e62b\") " pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:24 crc kubenswrapper[4661]: I1001 05:45:24.480474 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 01 05:45:28 crc kubenswrapper[4661]: W1001 05:45:28.273546 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12b98504_ea2f_4b12_a55e_5c3f514817a4.slice/crio-b86144ef7abcaf8537b65cd4eb8a1e36032edca0fa402f3485146f4b9e29a262 WatchSource:0}: Error finding container b86144ef7abcaf8537b65cd4eb8a1e36032edca0fa402f3485146f4b9e29a262: Status 404 returned error can't find the container with id b86144ef7abcaf8537b65cd4eb8a1e36032edca0fa402f3485146f4b9e29a262 Oct 01 05:45:28 crc kubenswrapper[4661]: I1001 05:45:28.590358 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerStarted","Data":"b86144ef7abcaf8537b65cd4eb8a1e36032edca0fa402f3485146f4b9e29a262"} Oct 01 05:45:39 crc kubenswrapper[4661]: I1001 05:45:39.518322 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 05:45:39 crc kubenswrapper[4661]: I1001 05:45:39.611963 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wmxcl"] Oct 01 05:45:45 crc kubenswrapper[4661]: E1001 05:45:45.073904 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current" Oct 01 05:45:45 crc kubenswrapper[4661]: E1001 05:45:45.074522 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current" Oct 01 05:45:45 crc kubenswrapper[4661]: E1001 05:45:45.074763 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p6psn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(e892c883-6f23-415f-9e9d-bde45fefe01e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:45 crc kubenswrapper[4661]: E1001 05:45:45.076152 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="e892c883-6f23-415f-9e9d-bde45fefe01e" Oct 01 05:45:45 crc kubenswrapper[4661]: E1001 05:45:45.742779 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-mariadb:current\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="e892c883-6f23-415f-9e9d-bde45fefe01e" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.278986 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 
05:45:46.279040 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.279168 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f2rjr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-notifications-server-0_openstack(1658ccd7-4bae-45bf-aa67-fc5c075a417c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.280651 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-notifications-server-0" podUID="1658ccd7-4bae-45bf-aa67-fc5c075a417c" Oct 01 05:45:46 crc 
kubenswrapper[4661]: E1001 05:45:46.299733 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.299802 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.299975 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pfxbs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(7b2acad5-a746-42a5-b9e8-a9904ad242bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.301255 4661 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" Oct 01 05:45:46 crc kubenswrapper[4661]: W1001 05:45:46.650812 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddff60953_8a38_41cb_bc21_6192798508a1.slice/crio-f98361f0287e19fc92a48aed552e59df1c64f0bc9a54453d1ff60216f9a10129 WatchSource:0}: Error finding container f98361f0287e19fc92a48aed552e59df1c64f0bc9a54453d1ff60216f9a10129: Status 404 returned error can't find the container with id f98361f0287e19fc92a48aed552e59df1c64f0bc9a54453d1ff60216f9a10129 Oct 01 05:45:46 crc kubenswrapper[4661]: I1001 05:45:46.753094 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dff60953-8a38-41cb-bc21-6192798508a1","Type":"ContainerStarted","Data":"f98361f0287e19fc92a48aed552e59df1c64f0bc9a54453d1ff60216f9a10129"} Oct 01 05:45:46 crc kubenswrapper[4661]: I1001 05:45:46.754597 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wmxcl" event={"ID":"34fe2d2b-33b8-4736-98c8-3b7ae70118dd","Type":"ContainerStarted","Data":"14f9578710d6ff9b39f271837d4196fe5f9c51e92c0ed2c38305ed0a346117c4"} Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.757437 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current\\\"\"" pod="openstack/rabbitmq-server-0" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" Oct 01 05:45:46 crc kubenswrapper[4661]: E1001 05:45:46.758060 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current\\\"\"" pod="openstack/rabbitmq-notifications-server-0" podUID="1658ccd7-4bae-45bf-aa67-fc5c075a417c" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.392831 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.393786 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.393981 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-msvx8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-545d49fd5c-7c6nb_openstack(f9e40475-a6e7-40c4-9053-738327ed551b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.395275 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" podUID="f9e40475-a6e7-40c4-9053-738327ed551b" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.449602 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.449662 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.449778 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-phf7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86b8f4ff9-wcnwg_openstack(855ad1a6-cb42-4fdd-9c30-ccc290365265): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.450968 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" podUID="855ad1a6-cb42-4fdd-9c30-ccc290365265" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.469601 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.469662 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.469835 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 
30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8xmk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(31336b4a-1953-44ab-b229-401a3a3ac031): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.471975 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.594309 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.594366 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.594491 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces 
--listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hp476,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8468885bfc-2t2gk_openstack(92bebd22-d17b-4231-a472-46db2c691d3d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.595772 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" podUID="92bebd22-d17b-4231-a472-46db2c691d3d" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.652317 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.652404 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.652602 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b5cwm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-9bd5d9d8c-57j5t_openstack(e8aef2be-9249-49a0-8f9f-71870656543c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.653813 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" podUID="e8aef2be-9249-49a0-8f9f-71870656543c" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.762905 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" Oct 01 05:45:47 crc kubenswrapper[4661]: E1001 05:45:47.763095 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current\\\"\"" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" podUID="e8aef2be-9249-49a0-8f9f-71870656543c" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.256130 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.267957 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.289893 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417774 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config\") pod \"92bebd22-d17b-4231-a472-46db2c691d3d\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417859 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config\") pod \"f9e40475-a6e7-40c4-9053-738327ed551b\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417910 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc\") pod \"f9e40475-a6e7-40c4-9053-738327ed551b\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417929 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc\") pod \"855ad1a6-cb42-4fdd-9c30-ccc290365265\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417947 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config\") pod \"855ad1a6-cb42-4fdd-9c30-ccc290365265\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.417983 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp476\" (UniqueName: \"kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476\") pod \"92bebd22-d17b-4231-a472-46db2c691d3d\" (UID: \"92bebd22-d17b-4231-a472-46db2c691d3d\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.418026 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phf7l\" (UniqueName: \"kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l\") pod \"855ad1a6-cb42-4fdd-9c30-ccc290365265\" (UID: \"855ad1a6-cb42-4fdd-9c30-ccc290365265\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.418061 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msvx8\" (UniqueName: \"kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8\") pod \"f9e40475-a6e7-40c4-9053-738327ed551b\" (UID: \"f9e40475-a6e7-40c4-9053-738327ed551b\") " Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.420389 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "855ad1a6-cb42-4fdd-9c30-ccc290365265" (UID: "855ad1a6-cb42-4fdd-9c30-ccc290365265"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.421207 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config" (OuterVolumeSpecName: "config") pod "855ad1a6-cb42-4fdd-9c30-ccc290365265" (UID: "855ad1a6-cb42-4fdd-9c30-ccc290365265"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.421274 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config" (OuterVolumeSpecName: "config") pod "92bebd22-d17b-4231-a472-46db2c691d3d" (UID: "92bebd22-d17b-4231-a472-46db2c691d3d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.421393 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config" (OuterVolumeSpecName: "config") pod "f9e40475-a6e7-40c4-9053-738327ed551b" (UID: "f9e40475-a6e7-40c4-9053-738327ed551b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.421697 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f9e40475-a6e7-40c4-9053-738327ed551b" (UID: "f9e40475-a6e7-40c4-9053-738327ed551b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.423070 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476" (OuterVolumeSpecName: "kube-api-access-hp476") pod "92bebd22-d17b-4231-a472-46db2c691d3d" (UID: "92bebd22-d17b-4231-a472-46db2c691d3d"). InnerVolumeSpecName "kube-api-access-hp476". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.483407 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mplg4"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.516838 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8" (OuterVolumeSpecName: "kube-api-access-msvx8") pod "f9e40475-a6e7-40c4-9053-738327ed551b" (UID: "f9e40475-a6e7-40c4-9053-738327ed551b"). InnerVolumeSpecName "kube-api-access-msvx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.518500 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l" (OuterVolumeSpecName: "kube-api-access-phf7l") pod "855ad1a6-cb42-4fdd-9c30-ccc290365265" (UID: "855ad1a6-cb42-4fdd-9c30-ccc290365265"). InnerVolumeSpecName "kube-api-access-phf7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519334 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msvx8\" (UniqueName: \"kubernetes.io/projected/f9e40475-a6e7-40c4-9053-738327ed551b-kube-api-access-msvx8\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519544 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92bebd22-d17b-4231-a472-46db2c691d3d-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519555 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519566 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9e40475-a6e7-40c4-9053-738327ed551b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519574 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519581 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/855ad1a6-cb42-4fdd-9c30-ccc290365265-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519589 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp476\" (UniqueName: \"kubernetes.io/projected/92bebd22-d17b-4231-a472-46db2c691d3d-kube-api-access-hp476\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.519596 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phf7l\" (UniqueName: \"kubernetes.io/projected/855ad1a6-cb42-4fdd-9c30-ccc290365265-kube-api-access-phf7l\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:48 crc kubenswrapper[4661]: W1001 05:45:48.521440 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5001332_068e_46eb_a21c_25e29832baab.slice/crio-b7ef12ed199a9954c45b0248c1fe0f980800615a50d1e50807beb51d6ec555ce WatchSource:0}: Error finding container b7ef12ed199a9954c45b0248c1fe0f980800615a50d1e50807beb51d6ec555ce: Status 404 returned error can't find the container with id b7ef12ed199a9954c45b0248c1fe0f980800615a50d1e50807beb51d6ec555ce Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.653369 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.769761 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" event={"ID":"f9e40475-a6e7-40c4-9053-738327ed551b","Type":"ContainerDied","Data":"ec11a236e69d03b0ee8f145e8defb7663c5596f367a6130a82389cee75de8eb5"} Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.769881 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-7c6nb" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.771257 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.771297 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b8f4ff9-wcnwg" event={"ID":"855ad1a6-cb42-4fdd-9c30-ccc290365265","Type":"ContainerDied","Data":"add7a85a4c0fa3102462b28c999432e6320fac1ee6b9098a2e7f53ff3e4c8ba7"} Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.772925 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mplg4" event={"ID":"c5001332-068e-46eb-a21c-25e29832baab","Type":"ContainerStarted","Data":"b7ef12ed199a9954c45b0248c1fe0f980800615a50d1e50807beb51d6ec555ce"} Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.773935 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" event={"ID":"92bebd22-d17b-4231-a472-46db2c691d3d","Type":"ContainerDied","Data":"31c262edfe883b36d3879e1810ae63e72ee896d0477271614de43f04c8e2b15c"} Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.774002 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-2t2gk" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.775754 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0e50e984-837a-48c4-ac76-c62066f13512","Type":"ContainerStarted","Data":"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1"} Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.775908 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.795400 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.452764632 podStartE2EDuration="32.795382199s" podCreationTimestamp="2025-10-01 05:45:16 +0000 UTC" firstStartedPulling="2025-10-01 05:45:17.706255657 +0000 UTC m=+966.644234271" lastFinishedPulling="2025-10-01 05:45:48.048873224 +0000 UTC m=+996.986851838" observedRunningTime="2025-10-01 05:45:48.791804491 +0000 UTC m=+997.729783115" watchObservedRunningTime="2025-10-01 05:45:48.795382199 +0000 UTC m=+997.733360813" Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.840025 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.850880 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-wcnwg"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.882926 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.893124 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-7c6nb"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.915347 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:48 crc kubenswrapper[4661]: I1001 05:45:48.925520 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-2t2gk"] Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.771710 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="855ad1a6-cb42-4fdd-9c30-ccc290365265" path="/var/lib/kubelet/pods/855ad1a6-cb42-4fdd-9c30-ccc290365265/volumes" Oct 01 05:45:49 crc kubenswrapper[4661]: 
I1001 05:45:49.772290 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92bebd22-d17b-4231-a472-46db2c691d3d" path="/var/lib/kubelet/pods/92bebd22-d17b-4231-a472-46db2c691d3d/volumes" Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.772653 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9e40475-a6e7-40c4-9053-738327ed551b" path="/var/lib/kubelet/pods/f9e40475-a6e7-40c4-9053-738327ed551b/volumes" Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.784770 4661 generic.go:334] "Generic (PLEG): container finished" podID="677c3be8-2587-44d1-8545-65238de20248" containerID="9a688bf9a0b39be5283f35c55011920252fa2c64a2cebfd67000ef1c6b6453ab" exitCode=0 Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.784838 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-nh67m" event={"ID":"677c3be8-2587-44d1-8545-65238de20248","Type":"ContainerDied","Data":"9a688bf9a0b39be5283f35c55011920252fa2c64a2cebfd67000ef1c6b6453ab"} Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.788510 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"daca2202-a971-4201-81be-edef6f0c40f6","Type":"ContainerStarted","Data":"21791cb0226b7d93f9dc770bf745eb75c2fd60c80b8f0f0963fb78d296ebc25e"} Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.792268 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"2741b07c-1750-4920-a734-2f51af08ac8b","Type":"ContainerStarted","Data":"7b51311f7f91cec09270d8366fda47d6e680a82930166518d6cf69727afbddf6"} Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.792401 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.794793 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6152c766-cf88-4b8c-9c8a-372dcdd4e62b","Type":"ContainerStarted","Data":"a59697a44248179261de0bc8ddd04793360339746ccc1514658673a2ead85c3c"} Oct 01 05:45:49 crc kubenswrapper[4661]: I1001 05:45:49.907178 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.254858187 podStartE2EDuration="35.907151689s" podCreationTimestamp="2025-10-01 05:45:14 +0000 UTC" firstStartedPulling="2025-10-01 05:45:15.611749494 +0000 UTC m=+964.549728118" lastFinishedPulling="2025-10-01 05:45:48.264043006 +0000 UTC m=+997.202021620" observedRunningTime="2025-10-01 05:45:49.897780761 +0000 UTC m=+998.835759385" watchObservedRunningTime="2025-10-01 05:45:49.907151689 +0000 UTC m=+998.845130323" Oct 01 05:45:50 crc kubenswrapper[4661]: I1001 05:45:50.806125 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerStarted","Data":"2a44bb32803e50fb7c3b62fe4aa9440b3daaec63859584cbee4f9639a0a5151d"} Oct 01 05:45:50 crc kubenswrapper[4661]: I1001 05:45:50.810172 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-nh67m" event={"ID":"677c3be8-2587-44d1-8545-65238de20248","Type":"ContainerStarted","Data":"1eb43813fa89e657ce65734a5fee0655b4a1968a6fddbd0b31d90d952dfe063e"} Oct 01 05:45:50 crc kubenswrapper[4661]: I1001 05:45:50.857286 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5449989c59-nh67m" podStartSLOduration=3.630547507 
podStartE2EDuration="40.857197287s" podCreationTimestamp="2025-10-01 05:45:10 +0000 UTC" firstStartedPulling="2025-10-01 05:45:11.037443577 +0000 UTC m=+959.975422191" lastFinishedPulling="2025-10-01 05:45:48.264093357 +0000 UTC m=+997.202071971" observedRunningTime="2025-10-01 05:45:50.852418286 +0000 UTC m=+999.790396900" watchObservedRunningTime="2025-10-01 05:45:50.857197287 +0000 UTC m=+999.795175901" Oct 01 05:45:51 crc kubenswrapper[4661]: I1001 05:45:51.822531 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.863687 4661 generic.go:334] "Generic (PLEG): container finished" podID="34fe2d2b-33b8-4736-98c8-3b7ae70118dd" containerID="826b199f90d5e160e174299524cf9b470535b36abd036684d9286040b8ae64dc" exitCode=0 Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.863746 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wmxcl" event={"ID":"34fe2d2b-33b8-4736-98c8-3b7ae70118dd","Type":"ContainerDied","Data":"826b199f90d5e160e174299524cf9b470535b36abd036684d9286040b8ae64dc"} Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.866025 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dff60953-8a38-41cb-bc21-6192798508a1","Type":"ContainerStarted","Data":"e0f7e0a86d718ca30048afb8e8c791fb7c9497e96071899d07f2244a5586e0fa"} Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.869715 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mplg4" event={"ID":"c5001332-068e-46eb-a21c-25e29832baab","Type":"ContainerStarted","Data":"22665c78116c9a0421b0e5f56471f40c401f262d87bce595bd9bc5b112122342"} Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.869810 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-mplg4" Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.872474 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6152c766-cf88-4b8c-9c8a-372dcdd4e62b","Type":"ContainerStarted","Data":"4f02a945aecc05ebf1f08ca34cc634361e5b0e398d6b0240364bdca43008e9d5"} Oct 01 05:45:54 crc kubenswrapper[4661]: I1001 05:45:54.916748 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-mplg4" podStartSLOduration=29.36587282 podStartE2EDuration="34.916712166s" podCreationTimestamp="2025-10-01 05:45:20 +0000 UTC" firstStartedPulling="2025-10-01 05:45:48.522993483 +0000 UTC m=+997.460972087" lastFinishedPulling="2025-10-01 05:45:54.073832779 +0000 UTC m=+1003.011811433" observedRunningTime="2025-10-01 05:45:54.912868761 +0000 UTC m=+1003.850847415" watchObservedRunningTime="2025-10-01 05:45:54.916712166 +0000 UTC m=+1003.854690860" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.157659 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.530960 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.623240 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.911770 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" 
event={"ID":"e8aef2be-9249-49a0-8f9f-71870656543c","Type":"ContainerDied","Data":"540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774"} Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.912039 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="540f6c3d8f764192c36f223cfd16e4aee6e736879682564bedd2297d50975774" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.917059 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wmxcl" event={"ID":"34fe2d2b-33b8-4736-98c8-3b7ae70118dd","Type":"ContainerStarted","Data":"a3e39e0300acee625b54f898842f1f9cdc78effa8403b81e1024e11accc9dbcb"} Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.917091 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wmxcl" event={"ID":"34fe2d2b-33b8-4736-98c8-3b7ae70118dd","Type":"ContainerStarted","Data":"13a7de0de2a454163c8cd9f644ff98a54239ce4b6d7e2f078a1480706c614358"} Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.917123 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.917141 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wmxcl" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.920593 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.939518 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-wmxcl" podStartSLOduration=28.132951456 podStartE2EDuration="35.939503106s" podCreationTimestamp="2025-10-01 05:45:20 +0000 UTC" firstStartedPulling="2025-10-01 05:45:46.242837976 +0000 UTC m=+995.180816590" lastFinishedPulling="2025-10-01 05:45:54.049389586 +0000 UTC m=+1002.987368240" observedRunningTime="2025-10-01 05:45:55.93453128 +0000 UTC m=+1004.872509884" watchObservedRunningTime="2025-10-01 05:45:55.939503106 +0000 UTC m=+1004.877481720" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.969135 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc\") pod \"e8aef2be-9249-49a0-8f9f-71870656543c\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.969204 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config\") pod \"e8aef2be-9249-49a0-8f9f-71870656543c\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.969296 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5cwm\" (UniqueName: \"kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm\") pod \"e8aef2be-9249-49a0-8f9f-71870656543c\" (UID: \"e8aef2be-9249-49a0-8f9f-71870656543c\") " Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.969670 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8aef2be-9249-49a0-8f9f-71870656543c" (UID: "e8aef2be-9249-49a0-8f9f-71870656543c"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.970581 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config" (OuterVolumeSpecName: "config") pod "e8aef2be-9249-49a0-8f9f-71870656543c" (UID: "e8aef2be-9249-49a0-8f9f-71870656543c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.970843 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.970860 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8aef2be-9249-49a0-8f9f-71870656543c-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:55 crc kubenswrapper[4661]: I1001 05:45:55.974598 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm" (OuterVolumeSpecName: "kube-api-access-b5cwm") pod "e8aef2be-9249-49a0-8f9f-71870656543c" (UID: "e8aef2be-9249-49a0-8f9f-71870656543c"). InnerVolumeSpecName "kube-api-access-b5cwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.072055 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5cwm\" (UniqueName: \"kubernetes.io/projected/e8aef2be-9249-49a0-8f9f-71870656543c-kube-api-access-b5cwm\") on node \"crc\" DevicePath \"\"" Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.940777 4661 generic.go:334] "Generic (PLEG): container finished" podID="daca2202-a971-4201-81be-edef6f0c40f6" containerID="21791cb0226b7d93f9dc770bf745eb75c2fd60c80b8f0f0963fb78d296ebc25e" exitCode=0 Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.941209 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9bd5d9d8c-57j5t" Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.940877 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"daca2202-a971-4201-81be-edef6f0c40f6","Type":"ContainerDied","Data":"21791cb0226b7d93f9dc770bf745eb75c2fd60c80b8f0f0963fb78d296ebc25e"} Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.993777 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:45:56 crc kubenswrapper[4661]: I1001 05:45:56.995090 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.000941 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.022902 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.040584 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.047022 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9bd5d9d8c-57j5t"] Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.103742 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.103800 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvrst\" (UniqueName: \"kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.104273 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.205764 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.205807 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvrst\" (UniqueName: \"kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.205857 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.206695 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.207877 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.223508 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvrst\" (UniqueName: \"kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst\") pod \"dnsmasq-dns-547b55867-bv6js\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.317856 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.778153 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8aef2be-9249-49a0-8f9f-71870656543c" path="/var/lib/kubelet/pods/e8aef2be-9249-49a0-8f9f-71870656543c/volumes" Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.955072 4661 generic.go:334] "Generic (PLEG): container finished" podID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerID="2a44bb32803e50fb7c3b62fe4aa9440b3daaec63859584cbee4f9639a0a5151d" exitCode=0 Oct 01 05:45:57 crc kubenswrapper[4661]: I1001 05:45:57.955120 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerDied","Data":"2a44bb32803e50fb7c3b62fe4aa9440b3daaec63859584cbee4f9639a0a5151d"} Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.185464 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.195025 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.202807 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.202842 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-7vntn" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.203242 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.203685 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.217826 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.325895 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zhqn\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-kube-api-access-8zhqn\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.325956 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-lock\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.325993 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.326019 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-cache\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.326107 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.428115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zhqn\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-kube-api-access-8zhqn\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.428196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-lock\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.429922 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.429967 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-cache\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.430031 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.430444 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.430869 4661 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.430914 4661 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.430992 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift podName:e08e6861-2a19-4c40-8ed3-aeb2662d75bd nodeName:}" failed. No retries permitted until 2025-10-01 05:45:58.930941479 +0000 UTC m=+1007.868920093 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift") pod "swift-storage-0" (UID: "e08e6861-2a19-4c40-8ed3-aeb2662d75bd") : configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.431149 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-lock\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.431364 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-cache\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.460138 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zhqn\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-kube-api-access-8zhqn\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.491333 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.801998 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-t28p6"] Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.803433 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.806370 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.806895 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.808291 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.821295 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-t28p6"] Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.935948 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940435 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940525 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940558 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgsgf\" (UniqueName: \"kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940585 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940626 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940670 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940689 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf\") pod 
\"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.940706 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.940856 4661 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.940874 4661 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: E1001 05:45:58.940912 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift podName:e08e6861-2a19-4c40-8ed3-aeb2662d75bd nodeName:}" failed. No retries permitted until 2025-10-01 05:45:59.940897404 +0000 UTC m=+1008.878876018 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift") pod "swift-storage-0" (UID: "e08e6861-2a19-4c40-8ed3-aeb2662d75bd") : configmap "swift-ring-files" not found Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.982183 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"daca2202-a971-4201-81be-edef6f0c40f6","Type":"ContainerStarted","Data":"94b7a212f4cb29d798864f61f53c1932cd54f3cf9ea8acc1dce190bed62d6129"} Oct 01 05:45:58 crc kubenswrapper[4661]: I1001 05:45:58.990262 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547b55867-bv6js" event={"ID":"7af783b5-defe-48af-9d81-5941b6cd9cff","Type":"ContainerStarted","Data":"d04a4e76a6b7ec1b87b67f67ec2be2ff0275d0aab69c418102b70ae59b36c16e"} Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.005667 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=13.416689611 podStartE2EDuration="47.005625746s" podCreationTimestamp="2025-10-01 05:45:12 +0000 UTC" firstStartedPulling="2025-10-01 05:45:14.675197323 +0000 UTC m=+963.613175937" lastFinishedPulling="2025-10-01 05:45:48.264133458 +0000 UTC m=+997.202112072" observedRunningTime="2025-10-01 05:45:59.004757042 +0000 UTC m=+1007.942735656" watchObservedRunningTime="2025-10-01 05:45:59.005625746 +0000 UTC m=+1007.943604370" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.015791 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e892c883-6f23-415f-9e9d-bde45fefe01e","Type":"ContainerStarted","Data":"a008d0a1703a103911b89ed462c418ed4ee4538e583c895f482be7ec484af1a8"} Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042400 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042487 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mgsgf\" (UniqueName: \"kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042517 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042556 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042622 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042655 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.042676 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.044202 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.044233 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.044767 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.049084 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.052924 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.053895 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.062945 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgsgf\" (UniqueName: \"kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf\") pod \"swift-ring-rebalance-t28p6\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.126524 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.615091 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-t28p6"] Oct 01 05:45:59 crc kubenswrapper[4661]: I1001 05:45:59.958602 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:45:59 crc kubenswrapper[4661]: E1001 05:45:59.959166 4661 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 05:45:59 crc kubenswrapper[4661]: E1001 05:45:59.959187 4661 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 05:45:59 crc kubenswrapper[4661]: E1001 05:45:59.959227 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift podName:e08e6861-2a19-4c40-8ed3-aeb2662d75bd nodeName:}" failed. No retries permitted until 2025-10-01 05:46:01.959213871 +0000 UTC m=+1010.897192485 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift") pod "swift-storage-0" (UID: "e08e6861-2a19-4c40-8ed3-aeb2662d75bd") : configmap "swift-ring-files" not found Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.024971 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dff60953-8a38-41cb-bc21-6192798508a1","Type":"ContainerStarted","Data":"f804f91e815f5582437b6e488dd3ec1381775bcebf1551beb1a29deb9815f6c9"} Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.027444 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6152c766-cf88-4b8c-9c8a-372dcdd4e62b","Type":"ContainerStarted","Data":"dd09a9fd876f4f071135ab3bd3c666f51e4dcb480fc4106058e4ec096f6b64b4"} Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.029267 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-t28p6" event={"ID":"66f7b863-7d30-41b4-882c-c982fafa148a","Type":"ContainerStarted","Data":"3e493c747279b9e46b419380ef503921782b60035549578149bde10e27a9cb8f"} Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.030742 4661 generic.go:334] "Generic (PLEG): container finished" podID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerID="437202249b5ada7694e2a92613eb71dc90f3a55d23dfbf0203b29d759043e834" exitCode=0 Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.030777 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547b55867-bv6js" event={"ID":"7af783b5-defe-48af-9d81-5941b6cd9cff","Type":"ContainerDied","Data":"437202249b5ada7694e2a92613eb71dc90f3a55d23dfbf0203b29d759043e834"} Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.046130 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=28.922548768 podStartE2EDuration="41.046111263s" podCreationTimestamp="2025-10-01 05:45:19 +0000 UTC" firstStartedPulling="2025-10-01 05:45:46.656030049 +0000 UTC m=+995.594008693" lastFinishedPulling="2025-10-01 05:45:58.779592564 +0000 UTC m=+1007.717571188" observedRunningTime="2025-10-01 05:46:00.043611315 +0000 UTC m=+1008.981589929" watchObservedRunningTime="2025-10-01 05:46:00.046111263 +0000 UTC m=+1008.984089887" Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.073561 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=27.185438198 podStartE2EDuration="37.073543248s" podCreationTimestamp="2025-10-01 05:45:23 +0000 UTC" firstStartedPulling="2025-10-01 05:45:48.845732565 +0000 UTC m=+997.783711179" lastFinishedPulling="2025-10-01 05:45:58.733837605 +0000 UTC m=+1007.671816229" observedRunningTime="2025-10-01 05:46:00.068074997 +0000 UTC m=+1009.006053611" watchObservedRunningTime="2025-10-01 05:46:00.073543248 +0000 UTC m=+1009.011521862" Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.246278 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.319477 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.481863 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 01 05:46:00 crc kubenswrapper[4661]: I1001 05:46:00.574584 4661 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.039373 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547b55867-bv6js" event={"ID":"7af783b5-defe-48af-9d81-5941b6cd9cff","Type":"ContainerStarted","Data":"e791ba32501c4cda2d08d1ef9b43310c04f3fdb78eda7f3a2af02e89fe4669cd"} Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.040724 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.040766 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.066398 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-547b55867-bv6js" podStartSLOduration=5.066374464 podStartE2EDuration="5.066374464s" podCreationTimestamp="2025-10-01 05:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:01.061396967 +0000 UTC m=+1009.999375591" watchObservedRunningTime="2025-10-01 05:46:01.066374464 +0000 UTC m=+1010.004353088" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.088705 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.108804 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.337789 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.365031 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.368317 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.370896 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.378275 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.399261 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.399314 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.399356 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2p7v\" (UniqueName: \"kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.399404 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.429807 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-hlpwz"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.430929 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.434764 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hlpwz"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.435011 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.492717 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.494571 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.498639 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.498848 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.498960 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.499347 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-5bjcq" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500428 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2p7v\" (UniqueName: \"kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500519 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500570 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-combined-ca-bundle\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500590 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mvg2\" (UniqueName: \"kubernetes.io/projected/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-kube-api-access-8mvg2\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500645 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500678 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500700 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-config\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 
05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500715 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500730 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovs-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.500755 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovn-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.501728 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.502285 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.504187 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.522977 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.550654 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2p7v\" (UniqueName: \"kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v\") pod \"dnsmasq-dns-757cddf575-5w84n\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.551092 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.551622 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.598324 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.599866 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602470 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-combined-ca-bundle\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602519 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mvg2\" (UniqueName: \"kubernetes.io/projected/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-kube-api-access-8mvg2\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602553 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq8fz\" (UniqueName: \"kubernetes.io/projected/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-kube-api-access-nq8fz\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602589 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602617 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602649 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-config\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602692 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovs-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602712 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovn-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " 
pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602730 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-config\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602753 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602767 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.602816 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-scripts\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.603898 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.604914 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovs-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.604979 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-ovn-rundir\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.605098 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-config\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.605930 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-combined-ca-bundle\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.613254 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"] Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.617412 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.639612 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mvg2\" (UniqueName: \"kubernetes.io/projected/c93ef988-b9e9-4cfc-950b-2b3060b6e4b8-kube-api-access-8mvg2\") pod \"ovn-controller-metrics-hlpwz\" (UID: \"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8\") " pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.703929 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.703975 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704016 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704081 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzngg\" (UniqueName: \"kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704108 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-scripts\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704127 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704177 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704195 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704213 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq8fz\" (UniqueName: \"kubernetes.io/projected/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-kube-api-access-nq8fz\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704247 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704272 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.704302 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-config\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.705292 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-config\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.705556 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.707448 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-scripts\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.707465 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.709250 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.712647 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.787686 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hlpwz" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.805961 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.806012 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzngg\" (UniqueName: \"kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.806045 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.806121 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.806140 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.807269 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.808244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.808751 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.809205 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.820099 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq8fz\" (UniqueName: \"kubernetes.io/projected/8586024c-fe6a-4ccd-adc0-2e8e2a1bf823-kube-api-access-nq8fz\") pod \"ovn-northd-0\" (UID: \"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823\") " pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.820515 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.825184 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzngg\" (UniqueName: \"kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg\") pod \"dnsmasq-dns-76f9c4c8bc-xbhc8\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:01 crc kubenswrapper[4661]: I1001 05:46:01.956013 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:02 crc kubenswrapper[4661]: I1001 05:46:02.009724 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:46:02 crc kubenswrapper[4661]: E1001 05:46:02.009926 4661 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 05:46:02 crc kubenswrapper[4661]: E1001 05:46:02.009978 4661 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 05:46:02 crc kubenswrapper[4661]: E1001 05:46:02.010059 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift podName:e08e6861-2a19-4c40-8ed3-aeb2662d75bd nodeName:}" failed. No retries permitted until 2025-10-01 05:46:06.010016445 +0000 UTC m=+1014.947995059 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift") pod "swift-storage-0" (UID: "e08e6861-2a19-4c40-8ed3-aeb2662d75bd") : configmap "swift-ring-files" not found Oct 01 05:46:02 crc kubenswrapper[4661]: I1001 05:46:02.049762 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:46:03 crc kubenswrapper[4661]: I1001 05:46:03.065096 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"1658ccd7-4bae-45bf-aa67-fc5c075a417c","Type":"ContainerStarted","Data":"85263945289c994cc57d153cf978cfa98d5db684e64dc605627246425cabbaee"} Oct 01 05:46:03 crc kubenswrapper[4661]: I1001 05:46:03.068609 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerStarted","Data":"e4b706e5b85bcc690eca0586716d67b71f05d9134448ccee9f89b6082624960a"} Oct 01 05:46:03 crc kubenswrapper[4661]: I1001 05:46:03.069721 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-547b55867-bv6js" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="dnsmasq-dns" containerID="cri-o://e791ba32501c4cda2d08d1ef9b43310c04f3fdb78eda7f3a2af02e89fe4669cd" gracePeriod=10 Oct 01 05:46:03 crc kubenswrapper[4661]: E1001 05:46:03.483841 4661 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.65:51222->38.102.83.65:34747: write tcp 38.102.83.65:51222->38.102.83.65:34747: write: connection reset by peer Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.077159 4661 generic.go:334] "Generic (PLEG): container finished" podID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerID="e791ba32501c4cda2d08d1ef9b43310c04f3fdb78eda7f3a2af02e89fe4669cd" exitCode=0 Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.077252 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547b55867-bv6js" event={"ID":"7af783b5-defe-48af-9d81-5941b6cd9cff","Type":"ContainerDied","Data":"e791ba32501c4cda2d08d1ef9b43310c04f3fdb78eda7f3a2af02e89fe4669cd"} Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.080307 4661 generic.go:334] "Generic (PLEG): container finished" podID="e892c883-6f23-415f-9e9d-bde45fefe01e" containerID="a008d0a1703a103911b89ed462c418ed4ee4538e583c895f482be7ec484af1a8" exitCode=0 Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.080360 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e892c883-6f23-415f-9e9d-bde45fefe01e","Type":"ContainerDied","Data":"a008d0a1703a103911b89ed462c418ed4ee4538e583c895f482be7ec484af1a8"} Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.194478 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 01 05:46:04 crc kubenswrapper[4661]: I1001 05:46:04.194581 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.546084 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.676396 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvrst\" (UniqueName: \"kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst\") pod \"7af783b5-defe-48af-9d81-5941b6cd9cff\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.676510 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config\") pod \"7af783b5-defe-48af-9d81-5941b6cd9cff\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.676579 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc\") pod \"7af783b5-defe-48af-9d81-5941b6cd9cff\" (UID: \"7af783b5-defe-48af-9d81-5941b6cd9cff\") " Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.685163 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst" (OuterVolumeSpecName: "kube-api-access-nvrst") pod "7af783b5-defe-48af-9d81-5941b6cd9cff" (UID: "7af783b5-defe-48af-9d81-5941b6cd9cff"). InnerVolumeSpecName "kube-api-access-nvrst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.723599 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7af783b5-defe-48af-9d81-5941b6cd9cff" (UID: "7af783b5-defe-48af-9d81-5941b6cd9cff"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.746097 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config" (OuterVolumeSpecName: "config") pod "7af783b5-defe-48af-9d81-5941b6cd9cff" (UID: "7af783b5-defe-48af-9d81-5941b6cd9cff"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.779168 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.779255 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7af783b5-defe-48af-9d81-5941b6cd9cff-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:05 crc kubenswrapper[4661]: I1001 05:46:05.779270 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvrst\" (UniqueName: \"kubernetes.io/projected/7af783b5-defe-48af-9d81-5941b6cd9cff-kube-api-access-nvrst\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.096701 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.097772 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:46:06 crc kubenswrapper[4661]: E1001 05:46:06.098033 4661 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 05:46:06 crc kubenswrapper[4661]: E1001 05:46:06.098051 4661 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 05:46:06 crc kubenswrapper[4661]: E1001 05:46:06.098089 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift podName:e08e6861-2a19-4c40-8ed3-aeb2662d75bd nodeName:}" failed. No retries permitted until 2025-10-01 05:46:14.098075721 +0000 UTC m=+1023.036054335 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift") pod "swift-storage-0" (UID: "e08e6861-2a19-4c40-8ed3-aeb2662d75bd") : configmap "swift-ring-files" not found Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.102709 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"] Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.104033 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerStarted","Data":"b84b9721d6dd02868e54896137a5dbea2f3a8a9dee5a151bf3687af9dab86b7a"} Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.105163 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-t28p6" event={"ID":"66f7b863-7d30-41b4-882c-c982fafa148a","Type":"ContainerStarted","Data":"212e098e21144c3351547ef0b3a078b10df425433de6f4f41d0876a99f771563"} Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.121281 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547b55867-bv6js" event={"ID":"7af783b5-defe-48af-9d81-5941b6cd9cff","Type":"ContainerDied","Data":"d04a4e76a6b7ec1b87b67f67ec2be2ff0275d0aab69c418102b70ae59b36c16e"} Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.121327 4661 scope.go:117] "RemoveContainer" containerID="e791ba32501c4cda2d08d1ef9b43310c04f3fdb78eda7f3a2af02e89fe4669cd" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.121338 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-547b55867-bv6js" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.121811 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-t28p6" podStartSLOduration=2.231703311 podStartE2EDuration="8.121802034s" podCreationTimestamp="2025-10-01 05:45:58 +0000 UTC" firstStartedPulling="2025-10-01 05:45:59.643349768 +0000 UTC m=+1008.581328382" lastFinishedPulling="2025-10-01 05:46:05.533448481 +0000 UTC m=+1014.471427105" observedRunningTime="2025-10-01 05:46:06.121427184 +0000 UTC m=+1015.059405798" watchObservedRunningTime="2025-10-01 05:46:06.121802034 +0000 UTC m=+1015.059780648" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.141015 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e892c883-6f23-415f-9e9d-bde45fefe01e","Type":"ContainerStarted","Data":"1702c654ce4f41f482bee386bf56d22e997a33a78a46befd2a81ac1cb6ab16ba"} Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.163718 4661 scope.go:117] "RemoveContainer" containerID="437202249b5ada7694e2a92613eb71dc90f3a55d23dfbf0203b29d759043e834" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.166826 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.192976 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-547b55867-bv6js"] Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.195150 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371983.659645 podStartE2EDuration="53.195129962s" podCreationTimestamp="2025-10-01 05:45:13 +0000 UTC" firstStartedPulling="2025-10-01 05:45:15.173478628 +0000 UTC m=+964.111457242" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:06.161146937 +0000 UTC m=+1015.099125551" watchObservedRunningTime="2025-10-01 05:46:06.195129962 +0000 UTC m=+1015.133108576" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.276132 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.348441 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 01 05:46:06 crc kubenswrapper[4661]: I1001 05:46:06.357941 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hlpwz"] Oct 01 05:46:06 crc kubenswrapper[4661]: W1001 05:46:06.521712 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc93ef988_b9e9_4cfc_950b_2b3060b6e4b8.slice/crio-8d6b2156436d8fd4b6cb203d5d3b315cf466049363fa11f3b0e027ac186d3631 WatchSource:0}: Error finding container 8d6b2156436d8fd4b6cb203d5d3b315cf466049363fa11f3b0e027ac186d3631: Status 404 returned error can't find the container with id 8d6b2156436d8fd4b6cb203d5d3b315cf466049363fa11f3b0e027ac186d3631 Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.022779 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.153977 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hlpwz" event={"ID":"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8","Type":"ContainerStarted","Data":"8d6b2156436d8fd4b6cb203d5d3b315cf466049363fa11f3b0e027ac186d3631"} Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.155465 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823","Type":"ContainerStarted","Data":"0592d7434cd4d94a47dc111aede3ad22acc90932fa57b8f0737e606485dfd688"} Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.158111 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757cddf575-5w84n" event={"ID":"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91","Type":"ContainerStarted","Data":"af3a8631fc2683ee1ecb092762412bcac444a8410ce4b366719ab4f1e86a6703"} Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.163220 4661 generic.go:334] "Generic (PLEG): container finished" podID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerID="d79757566c13ac688b66c06139663ec9b3375c5c6b3eb29f4ee239e83ac8781a" exitCode=0 Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.164181 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" event={"ID":"0ba34d64-f5f8-4543-91ad-deddaaa978fc","Type":"ContainerDied","Data":"d79757566c13ac688b66c06139663ec9b3375c5c6b3eb29f4ee239e83ac8781a"} Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.164224 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" event={"ID":"0ba34d64-f5f8-4543-91ad-deddaaa978fc","Type":"ContainerStarted","Data":"fdb966fcf1659485b92c4dfb2822ad1500adf43e605d5ce513d627851382c53a"} Oct 01 05:46:07 crc kubenswrapper[4661]: I1001 05:46:07.777605 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" path="/var/lib/kubelet/pods/7af783b5-defe-48af-9d81-5941b6cd9cff/volumes" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.175845 4661 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823","Type":"ContainerStarted","Data":"f25cc81e6be3be0d20523952347ec73fbd465151374ffd70519a6bd3095e2f11"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.175907 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8586024c-fe6a-4ccd-adc0-2e8e2a1bf823","Type":"ContainerStarted","Data":"4ec45f699a853cf2548749827958532862ab526750db1fbd6cda215a7c7de048"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.176008 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.179944 4661 generic.go:334] "Generic (PLEG): container finished" podID="30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" containerID="5ecc96524ff04ebbc35ac775f6b07b1971c201743010c4f4104a06f1ad4ddcb8" exitCode=0 Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.180175 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757cddf575-5w84n" event={"ID":"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91","Type":"ContainerDied","Data":"5ecc96524ff04ebbc35ac775f6b07b1971c201743010c4f4104a06f1ad4ddcb8"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.185062 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" event={"ID":"0ba34d64-f5f8-4543-91ad-deddaaa978fc","Type":"ContainerStarted","Data":"62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.185165 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.187027 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerStarted","Data":"039dfc87fb55da52d83c66c05a25ea5859a3d7bdb5cf40fe94cb117e3a2ca1d2"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.192625 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hlpwz" event={"ID":"c93ef988-b9e9-4cfc-950b-2b3060b6e4b8","Type":"ContainerStarted","Data":"ba99fa0fca3c734fdf1efa618db5c437491c97670eb927455d03f6372d0bd992"} Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.214120 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=6.043820622 podStartE2EDuration="7.21409231s" podCreationTimestamp="2025-10-01 05:46:01 +0000 UTC" firstStartedPulling="2025-10-01 05:46:06.288070811 +0000 UTC m=+1015.226049425" lastFinishedPulling="2025-10-01 05:46:07.458342479 +0000 UTC m=+1016.396321113" observedRunningTime="2025-10-01 05:46:08.206336306 +0000 UTC m=+1017.144314950" watchObservedRunningTime="2025-10-01 05:46:08.21409231 +0000 UTC m=+1017.152070944" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.255543 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-hlpwz" podStartSLOduration=7.25551756 podStartE2EDuration="7.25551756s" podCreationTimestamp="2025-10-01 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:08.239013666 +0000 UTC m=+1017.176992290" watchObservedRunningTime="2025-10-01 05:46:08.25551756 +0000 UTC m=+1017.193496164" Oct 01 
05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.352188 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" podStartSLOduration=7.35216737 podStartE2EDuration="7.35216737s" podCreationTimestamp="2025-10-01 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:08.322919785 +0000 UTC m=+1017.260898449" watchObservedRunningTime="2025-10-01 05:46:08.35216737 +0000 UTC m=+1017.290145994" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.713699 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.849311 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config\") pod \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.849429 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2p7v\" (UniqueName: \"kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v\") pod \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.849524 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc\") pod \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.849574 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb\") pod \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\" (UID: \"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91\") " Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.854797 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v" (OuterVolumeSpecName: "kube-api-access-h2p7v") pod "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" (UID: "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91"). InnerVolumeSpecName "kube-api-access-h2p7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.869973 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" (UID: "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.872125 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config" (OuterVolumeSpecName: "config") pod "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" (UID: "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.878897 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" (UID: "30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.951289 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2p7v\" (UniqueName: \"kubernetes.io/projected/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-kube-api-access-h2p7v\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.951320 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.951329 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:08 crc kubenswrapper[4661]: I1001 05:46:08.951338 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.202347 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757cddf575-5w84n" event={"ID":"30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91","Type":"ContainerDied","Data":"af3a8631fc2683ee1ecb092762412bcac444a8410ce4b366719ab4f1e86a6703"} Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.202413 4661 scope.go:117] "RemoveContainer" containerID="5ecc96524ff04ebbc35ac775f6b07b1971c201743010c4f4104a06f1ad4ddcb8" Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.203693 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757cddf575-5w84n" Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.205308 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerStarted","Data":"e88da81919f1cf59bd5361fe51f9e5e37ba1e82f639bf34da7f45995d01cb087"} Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.299394 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.308855 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757cddf575-5w84n"] Oct 01 05:46:09 crc kubenswrapper[4661]: I1001 05:46:09.766604 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" path="/var/lib/kubelet/pods/30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91/volumes" Oct 01 05:46:12 crc kubenswrapper[4661]: I1001 05:46:12.243210 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerStarted","Data":"38229022bb87b1eb9eba4fc0e420626aba9e306b9e1c284a511514dde249b8aa"} Oct 01 05:46:12 crc kubenswrapper[4661]: I1001 05:46:12.292009 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=12.880341091 podStartE2EDuration="56.291978366s" podCreationTimestamp="2025-10-01 05:45:16 +0000 UTC" firstStartedPulling="2025-10-01 05:45:28.282449334 +0000 UTC m=+977.220427948" lastFinishedPulling="2025-10-01 05:46:11.694086589 +0000 UTC m=+1020.632065223" observedRunningTime="2025-10-01 05:46:12.280040977 +0000 UTC m=+1021.218019671" watchObservedRunningTime="2025-10-01 05:46:12.291978366 +0000 UTC m=+1021.229957010" Oct 01 05:46:13 crc kubenswrapper[4661]: I1001 05:46:13.253808 4661 generic.go:334] "Generic (PLEG): container finished" podID="66f7b863-7d30-41b4-882c-c982fafa148a" containerID="212e098e21144c3351547ef0b3a078b10df425433de6f4f41d0876a99f771563" exitCode=0 Oct 01 05:46:13 crc kubenswrapper[4661]: I1001 05:46:13.253942 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-t28p6" event={"ID":"66f7b863-7d30-41b4-882c-c982fafa148a","Type":"ContainerDied","Data":"212e098e21144c3351547ef0b3a078b10df425433de6f4f41d0876a99f771563"} Oct 01 05:46:13 crc kubenswrapper[4661]: I1001 05:46:13.589139 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.147990 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.160424 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e08e6861-2a19-4c40-8ed3-aeb2662d75bd-etc-swift\") pod \"swift-storage-0\" (UID: \"e08e6861-2a19-4c40-8ed3-aeb2662d75bd\") " pod="openstack/swift-storage-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.439680 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.606858 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.607850 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.664805 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.690226 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758119 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgsgf\" (UniqueName: \"kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758275 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758379 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758516 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758590 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758684 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.758758 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf\") pod \"66f7b863-7d30-41b4-882c-c982fafa148a\" (UID: \"66f7b863-7d30-41b4-882c-c982fafa148a\") " Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.759015 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: 
"66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.759347 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.760167 4661 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66f7b863-7d30-41b4-882c-c982fafa148a-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.760230 4661 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.764203 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf" (OuterVolumeSpecName: "kube-api-access-mgsgf") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "kube-api-access-mgsgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.768521 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.799325 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts" (OuterVolumeSpecName: "scripts") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.800982 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.810102 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66f7b863-7d30-41b4-882c-c982fafa148a" (UID: "66f7b863-7d30-41b4-882c-c982fafa148a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837080 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-mttld"] Oct 01 05:46:14 crc kubenswrapper[4661]: E1001 05:46:14.837467 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" containerName="init" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837486 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" containerName="init" Oct 01 05:46:14 crc kubenswrapper[4661]: E1001 05:46:14.837503 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="init" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837510 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="init" Oct 01 05:46:14 crc kubenswrapper[4661]: E1001 05:46:14.837525 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f7b863-7d30-41b4-882c-c982fafa148a" containerName="swift-ring-rebalance" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837531 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f7b863-7d30-41b4-882c-c982fafa148a" containerName="swift-ring-rebalance" Oct 01 05:46:14 crc kubenswrapper[4661]: E1001 05:46:14.837544 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="dnsmasq-dns" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837552 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="dnsmasq-dns" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837719 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7af783b5-defe-48af-9d81-5941b6cd9cff" containerName="dnsmasq-dns" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837738 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f7b863-7d30-41b4-882c-c982fafa148a" containerName="swift-ring-rebalance" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.837765 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="30fb3e4e-6c13-4f04-bb89-a5edc4b6dc91" containerName="init" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.838324 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-mttld" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.844607 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mttld"] Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.881764 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.881790 4661 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.881801 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgsgf\" (UniqueName: \"kubernetes.io/projected/66f7b863-7d30-41b4-882c-c982fafa148a-kube-api-access-mgsgf\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.881810 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66f7b863-7d30-41b4-882c-c982fafa148a-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.881819 4661 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66f7b863-7d30-41b4-882c-c982fafa148a-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:14 crc kubenswrapper[4661]: I1001 05:46:14.983875 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgtvp\" (UniqueName: \"kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp\") pod \"keystone-db-create-mttld\" (UID: \"56cf476f-5124-495f-b4b8-4899a31e4f63\") " pod="openstack/keystone-db-create-mttld" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.044613 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-v4b88"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.045867 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-v4b88" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.064170 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-v4b88"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.081655 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.086083 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgtvp\" (UniqueName: \"kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp\") pod \"keystone-db-create-mttld\" (UID: \"56cf476f-5124-495f-b4b8-4899a31e4f63\") " pod="openstack/keystone-db-create-mttld" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.106527 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgtvp\" (UniqueName: \"kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp\") pod \"keystone-db-create-mttld\" (UID: \"56cf476f-5124-495f-b4b8-4899a31e4f63\") " pod="openstack/keystone-db-create-mttld" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.187364 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kgt2\" (UniqueName: \"kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2\") pod \"placement-db-create-v4b88\" (UID: \"4d068347-68e4-4719-8e0a-6a514729f385\") " pod="openstack/placement-db-create-v4b88" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.208423 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mttld" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.273865 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-t28p6" event={"ID":"66f7b863-7d30-41b4-882c-c982fafa148a","Type":"ContainerDied","Data":"3e493c747279b9e46b419380ef503921782b60035549578149bde10e27a9cb8f"} Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.273910 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e493c747279b9e46b419380ef503921782b60035549578149bde10e27a9cb8f" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.273948 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-t28p6" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.276805 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"8dbb365a4722f2898867bce55458f461f4c28a1eb04ec19cca419166757cf6a5"} Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.297143 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kgt2\" (UniqueName: \"kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2\") pod \"placement-db-create-v4b88\" (UID: \"4d068347-68e4-4719-8e0a-6a514729f385\") " pod="openstack/placement-db-create-v4b88" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.321466 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kgt2\" (UniqueName: \"kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2\") pod \"placement-db-create-v4b88\" (UID: \"4d068347-68e4-4719-8e0a-6a514729f385\") " pod="openstack/placement-db-create-v4b88" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.364465 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-v4b88" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.394193 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-j2bph"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.395706 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-j2bph" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.416585 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.417873 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-j2bph"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.520735 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsns2\" (UniqueName: \"kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2\") pod \"glance-db-create-j2bph\" (UID: \"1a6cf6dd-8146-4582-a2be-3525b97f43fa\") " pod="openstack/glance-db-create-j2bph" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.607492 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mttld"] Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.624008 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsns2\" (UniqueName: \"kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2\") pod \"glance-db-create-j2bph\" (UID: \"1a6cf6dd-8146-4582-a2be-3525b97f43fa\") " pod="openstack/glance-db-create-j2bph" Oct 01 05:46:15 crc kubenswrapper[4661]: W1001 05:46:15.627836 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56cf476f_5124_495f_b4b8_4899a31e4f63.slice/crio-10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397 WatchSource:0}: Error finding container 10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397: Status 404 returned error can't find the container with id 10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397 Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 
05:46:15.656576 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsns2\" (UniqueName: \"kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2\") pod \"glance-db-create-j2bph\" (UID: \"1a6cf6dd-8146-4582-a2be-3525b97f43fa\") " pod="openstack/glance-db-create-j2bph" Oct 01 05:46:15 crc kubenswrapper[4661]: I1001 05:46:15.762509 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-j2bph" Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.016934 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-v4b88"] Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.262426 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-j2bph"] Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.290964 4661 generic.go:334] "Generic (PLEG): container finished" podID="56cf476f-5124-495f-b4b8-4899a31e4f63" containerID="a0f329ac31d84c6ebe1dba5e3f2b9377e7c946b59ca2b4dc7db376114e6aa43c" exitCode=0 Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.291719 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mttld" event={"ID":"56cf476f-5124-495f-b4b8-4899a31e4f63","Type":"ContainerDied","Data":"a0f329ac31d84c6ebe1dba5e3f2b9377e7c946b59ca2b4dc7db376114e6aa43c"} Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.291783 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mttld" event={"ID":"56cf476f-5124-495f-b4b8-4899a31e4f63","Type":"ContainerStarted","Data":"10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397"} Oct 01 05:46:16 crc kubenswrapper[4661]: I1001 05:46:16.967917 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.047731 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.048154 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5449989c59-nh67m" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="dnsmasq-dns" containerID="cri-o://1eb43813fa89e657ce65734a5fee0655b4a1968a6fddbd0b31d90d952dfe063e" gracePeriod=10 Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.092687 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-ppl9g"] Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.093832 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.097405 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-ppl9g"] Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.161475 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9tpb\" (UniqueName: \"kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb\") pod \"watcher-db-create-ppl9g\" (UID: \"3c08e846-393c-4f97-904d-f0a5c89a33e5\") " pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.264841 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9tpb\" (UniqueName: \"kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb\") pod \"watcher-db-create-ppl9g\" (UID: \"3c08e846-393c-4f97-904d-f0a5c89a33e5\") " pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.303069 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9tpb\" (UniqueName: \"kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb\") pod \"watcher-db-create-ppl9g\" (UID: \"3c08e846-393c-4f97-904d-f0a5c89a33e5\") " pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.322721 4661 generic.go:334] "Generic (PLEG): container finished" podID="4d068347-68e4-4719-8e0a-6a514729f385" containerID="a6c6b7639a2547d364919b1640328a8e77276a0f4eefcc02b71f74dc06e80de9" exitCode=0 Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.322827 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-v4b88" event={"ID":"4d068347-68e4-4719-8e0a-6a514729f385","Type":"ContainerDied","Data":"a6c6b7639a2547d364919b1640328a8e77276a0f4eefcc02b71f74dc06e80de9"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.322866 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-v4b88" event={"ID":"4d068347-68e4-4719-8e0a-6a514729f385","Type":"ContainerStarted","Data":"dd4e7aef1b107ad0d006d706551326174edf104500bb06906d5fbf6be4de5389"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.328901 4661 generic.go:334] "Generic (PLEG): container finished" podID="677c3be8-2587-44d1-8545-65238de20248" containerID="1eb43813fa89e657ce65734a5fee0655b4a1968a6fddbd0b31d90d952dfe063e" exitCode=0 Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.329042 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-nh67m" event={"ID":"677c3be8-2587-44d1-8545-65238de20248","Type":"ContainerDied","Data":"1eb43813fa89e657ce65734a5fee0655b4a1968a6fddbd0b31d90d952dfe063e"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.336118 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"cdbc2767fb6b1ceed45870f3e64cc2340bdec56727546a4c5863fc8775a9f921"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.336202 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"cea5b8f00545cbb76cf82bf3abef5bef069e6168b8c8aaaaf0e84bfbf5f90204"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.336221 4661 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"83fbc1520eb816125e5a758f01eef7319eb15af02738b12680d48d0799b7a99d"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.336235 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"3cb8ed66f5c376ba276973f4112acec6bee61f2c2a74455831578b01b05633f0"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.338069 4661 generic.go:334] "Generic (PLEG): container finished" podID="1a6cf6dd-8146-4582-a2be-3525b97f43fa" containerID="fa5e8dccb39b0d77e514de26ef0f261a01fbc69e6ec0569683c6ea8af4a66637" exitCode=0 Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.338895 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-j2bph" event={"ID":"1a6cf6dd-8146-4582-a2be-3525b97f43fa","Type":"ContainerDied","Data":"fa5e8dccb39b0d77e514de26ef0f261a01fbc69e6ec0569683c6ea8af4a66637"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.338927 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-j2bph" event={"ID":"1a6cf6dd-8146-4582-a2be-3525b97f43fa","Type":"ContainerStarted","Data":"49eac7abc330151d3077198b02ab5c75b96ef9fadfbbc20b3c175d02ebaf8630"} Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.531156 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.581880 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.677217 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brvsm\" (UniqueName: \"kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm\") pod \"677c3be8-2587-44d1-8545-65238de20248\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.677260 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config\") pod \"677c3be8-2587-44d1-8545-65238de20248\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.677420 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc\") pod \"677c3be8-2587-44d1-8545-65238de20248\" (UID: \"677c3be8-2587-44d1-8545-65238de20248\") " Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.689052 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm" (OuterVolumeSpecName: "kube-api-access-brvsm") pod "677c3be8-2587-44d1-8545-65238de20248" (UID: "677c3be8-2587-44d1-8545-65238de20248"). InnerVolumeSpecName "kube-api-access-brvsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.710437 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-mttld" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.736948 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "677c3be8-2587-44d1-8545-65238de20248" (UID: "677c3be8-2587-44d1-8545-65238de20248"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.749334 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config" (OuterVolumeSpecName: "config") pod "677c3be8-2587-44d1-8545-65238de20248" (UID: "677c3be8-2587-44d1-8545-65238de20248"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.785448 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brvsm\" (UniqueName: \"kubernetes.io/projected/677c3be8-2587-44d1-8545-65238de20248-kube-api-access-brvsm\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.785506 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.785518 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c3be8-2587-44d1-8545-65238de20248-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.887223 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgtvp\" (UniqueName: \"kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp\") pod \"56cf476f-5124-495f-b4b8-4899a31e4f63\" (UID: \"56cf476f-5124-495f-b4b8-4899a31e4f63\") " Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.893989 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp" (OuterVolumeSpecName: "kube-api-access-sgtvp") pod "56cf476f-5124-495f-b4b8-4899a31e4f63" (UID: "56cf476f-5124-495f-b4b8-4899a31e4f63"). InnerVolumeSpecName "kube-api-access-sgtvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:17 crc kubenswrapper[4661]: I1001 05:46:17.989550 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgtvp\" (UniqueName: \"kubernetes.io/projected/56cf476f-5124-495f-b4b8-4899a31e4f63-kube-api-access-sgtvp\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.019884 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-ppl9g"] Oct 01 05:46:18 crc kubenswrapper[4661]: W1001 05:46:18.032098 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c08e846_393c_4f97_904d_f0a5c89a33e5.slice/crio-d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e WatchSource:0}: Error finding container d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e: Status 404 returned error can't find the container with id d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.349979 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-nh67m" event={"ID":"677c3be8-2587-44d1-8545-65238de20248","Type":"ContainerDied","Data":"9cf828550b84c005e0e571a7cb0e572ca4d4bc26597d9d9e9b6159e2d8a9ed12"} Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.350078 4661 scope.go:117] "RemoveContainer" containerID="1eb43813fa89e657ce65734a5fee0655b4a1968a6fddbd0b31d90d952dfe063e" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.350234 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-nh67m" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.359190 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mttld" event={"ID":"56cf476f-5124-495f-b4b8-4899a31e4f63","Type":"ContainerDied","Data":"10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397"} Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.359226 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-mttld" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.359235 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10da56052545f198d634be0f20decaf8e429a18d049455c3be7b4e29746c2397" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.364260 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-ppl9g" event={"ID":"3c08e846-393c-4f97-904d-f0a5c89a33e5","Type":"ContainerStarted","Data":"5cb8ed685ba5e03c2e0b55c5fb97a9e8a1c638d574f449195cdb031d532cfd6a"} Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.364298 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-ppl9g" event={"ID":"3c08e846-393c-4f97-904d-f0a5c89a33e5","Type":"ContainerStarted","Data":"d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e"} Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.392141 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.392692 4661 scope.go:117] "RemoveContainer" containerID="9a688bf9a0b39be5283f35c55011920252fa2c64a2cebfd67000ef1c6b6453ab" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.400035 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-nh67m"] Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.409264 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-create-ppl9g" podStartSLOduration=1.409250281 podStartE2EDuration="1.409250281s" podCreationTimestamp="2025-10-01 05:46:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:18.404318126 +0000 UTC m=+1027.342296750" watchObservedRunningTime="2025-10-01 05:46:18.409250281 +0000 UTC m=+1027.347228905" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.589893 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:18 crc kubenswrapper[4661]: I1001 05:46:18.592565 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.376122 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-j2bph" event={"ID":"1a6cf6dd-8146-4582-a2be-3525b97f43fa","Type":"ContainerDied","Data":"49eac7abc330151d3077198b02ab5c75b96ef9fadfbbc20b3c175d02ebaf8630"} Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.376763 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49eac7abc330151d3077198b02ab5c75b96ef9fadfbbc20b3c175d02ebaf8630" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.379274 4661 generic.go:334] "Generic (PLEG): container finished" podID="3c08e846-393c-4f97-904d-f0a5c89a33e5" containerID="5cb8ed685ba5e03c2e0b55c5fb97a9e8a1c638d574f449195cdb031d532cfd6a" exitCode=0 Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.379404 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-ppl9g" event={"ID":"3c08e846-393c-4f97-904d-f0a5c89a33e5","Type":"ContainerDied","Data":"5cb8ed685ba5e03c2e0b55c5fb97a9e8a1c638d574f449195cdb031d532cfd6a"} Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.385020 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-create-v4b88" event={"ID":"4d068347-68e4-4719-8e0a-6a514729f385","Type":"ContainerDied","Data":"dd4e7aef1b107ad0d006d706551326174edf104500bb06906d5fbf6be4de5389"} Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.385068 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd4e7aef1b107ad0d006d706551326174edf104500bb06906d5fbf6be4de5389" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.387054 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.441743 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-v4b88" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.455739 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-j2bph" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.534763 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsns2\" (UniqueName: \"kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2\") pod \"1a6cf6dd-8146-4582-a2be-3525b97f43fa\" (UID: \"1a6cf6dd-8146-4582-a2be-3525b97f43fa\") " Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.534858 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kgt2\" (UniqueName: \"kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2\") pod \"4d068347-68e4-4719-8e0a-6a514729f385\" (UID: \"4d068347-68e4-4719-8e0a-6a514729f385\") " Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.544537 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2" (OuterVolumeSpecName: "kube-api-access-5kgt2") pod "4d068347-68e4-4719-8e0a-6a514729f385" (UID: "4d068347-68e4-4719-8e0a-6a514729f385"). InnerVolumeSpecName "kube-api-access-5kgt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.549258 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2" (OuterVolumeSpecName: "kube-api-access-jsns2") pod "1a6cf6dd-8146-4582-a2be-3525b97f43fa" (UID: "1a6cf6dd-8146-4582-a2be-3525b97f43fa"). InnerVolumeSpecName "kube-api-access-jsns2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.637973 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsns2\" (UniqueName: \"kubernetes.io/projected/1a6cf6dd-8146-4582-a2be-3525b97f43fa-kube-api-access-jsns2\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.638037 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kgt2\" (UniqueName: \"kubernetes.io/projected/4d068347-68e4-4719-8e0a-6a514729f385-kube-api-access-5kgt2\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:19 crc kubenswrapper[4661]: I1001 05:46:19.768483 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="677c3be8-2587-44d1-8545-65238de20248" path="/var/lib/kubelet/pods/677c3be8-2587-44d1-8545-65238de20248/volumes" Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411131 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"921d3a8237f77f2f60803d2f688f5028e862601d6d7dffb48c84178d7ee14e9a"} Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411525 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"c11ad8c6a5312a44d639fb67fe044a02cc6f44ea887f76199f91bd48f2ee228d"} Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411551 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"2e6023b413227995e8b930b1758ec0bad5ed2608e7a075c5c3a11b9211af3da7"} Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411562 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"5e15215782223576d5dd14a1222ead7fb93f4ca553a0c6429876723a9b7135bf"} Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411187 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-j2bph" Oct 01 05:46:20 crc kubenswrapper[4661]: I1001 05:46:20.411147 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-v4b88" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.045264 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.162850 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9tpb\" (UniqueName: \"kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb\") pod \"3c08e846-393c-4f97-904d-f0a5c89a33e5\" (UID: \"3c08e846-393c-4f97-904d-f0a5c89a33e5\") " Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.168899 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb" (OuterVolumeSpecName: "kube-api-access-x9tpb") pod "3c08e846-393c-4f97-904d-f0a5c89a33e5" (UID: "3c08e846-393c-4f97-904d-f0a5c89a33e5"). InnerVolumeSpecName "kube-api-access-x9tpb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.265401 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9tpb\" (UniqueName: \"kubernetes.io/projected/3c08e846-393c-4f97-904d-f0a5c89a33e5-kube-api-access-x9tpb\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.468400 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.472120 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"489302b085b3a361feb8f74bc24654ba646be17d50b057ec4deb39659ed25cb5"} Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.477338 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-ppl9g" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.477368 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-ppl9g" event={"ID":"3c08e846-393c-4f97-904d-f0a5c89a33e5","Type":"ContainerDied","Data":"d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e"} Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.477399 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7d7a22b0d9b71418a6daee8ba9fadd96d11c19a18197046bbde442a94e8d25e" Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.477659 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="thanos-sidecar" containerID="cri-o://38229022bb87b1eb9eba4fc0e420626aba9e306b9e1c284a511514dde249b8aa" gracePeriod=600 Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.477690 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="config-reloader" containerID="cri-o://e88da81919f1cf59bd5361fe51f9e5e37ba1e82f639bf34da7f45995d01cb087" gracePeriod=600 Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.482722 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="prometheus" containerID="cri-o://b84b9721d6dd02868e54896137a5dbea2f3a8a9dee5a151bf3687af9dab86b7a" gracePeriod=600 Oct 01 05:46:21 crc kubenswrapper[4661]: I1001 05:46:21.887057 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.501166 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"b6d82c50ccd199eaec1e16def761df2ef39d9709f6f7006e22777de0815c0210"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.501438 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"829988d4d1c958a069257ef0fd8e4a1d505ab3381b28f52f98dd9275e0c76806"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.501448 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"7f56231ce498c0ea8ee19a65bda6bd057cdd7cfa0d4dde809fd0b1a2980679c5"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.501456 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"a7091d49c15be5dc1d826fa75cee418f002b6eac037fb1c5756448e40f6e263d"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504749 4661 generic.go:334] "Generic (PLEG): container finished" podID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerID="38229022bb87b1eb9eba4fc0e420626aba9e306b9e1c284a511514dde249b8aa" exitCode=0 Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504774 4661 generic.go:334] "Generic (PLEG): container finished" podID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerID="e88da81919f1cf59bd5361fe51f9e5e37ba1e82f639bf34da7f45995d01cb087" exitCode=0 Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504782 4661 generic.go:334] "Generic (PLEG): container finished" podID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerID="b84b9721d6dd02868e54896137a5dbea2f3a8a9dee5a151bf3687af9dab86b7a" exitCode=0 Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504798 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerDied","Data":"38229022bb87b1eb9eba4fc0e420626aba9e306b9e1c284a511514dde249b8aa"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504829 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerDied","Data":"e88da81919f1cf59bd5361fe51f9e5e37ba1e82f639bf34da7f45995d01cb087"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.504841 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerDied","Data":"b84b9721d6dd02868e54896137a5dbea2f3a8a9dee5a151bf3687af9dab86b7a"} Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.625527 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.692472 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.692577 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wltdz\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.692646 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.692720 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.692808 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.693363 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.693446 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.693787 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.693901 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"12b98504-ea2f-4b12-a55e-5c3f514817a4\" (UID: \"12b98504-ea2f-4b12-a55e-5c3f514817a4\") " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.694273 4661 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/12b98504-ea2f-4b12-a55e-5c3f514817a4-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.700442 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.700468 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz" (OuterVolumeSpecName: "kube-api-access-wltdz") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "kube-api-access-wltdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.700461 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config" (OuterVolumeSpecName: "config") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.701108 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out" (OuterVolumeSpecName: "config-out") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.716195 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "pvc-70227665-af75-4dfe-9648-95f31cf3d818". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.719780 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.751914 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config" (OuterVolumeSpecName: "web-config") pod "12b98504-ea2f-4b12-a55e-5c3f514817a4" (UID: "12b98504-ea2f-4b12-a55e-5c3f514817a4"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.795444 4661 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.795715 4661 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/12b98504-ea2f-4b12-a55e-5c3f514817a4-config-out\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.795807 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") on node \"crc\" " Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.795868 4661 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-web-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.795932 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wltdz\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-kube-api-access-wltdz\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.796007 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/12b98504-ea2f-4b12-a55e-5c3f514817a4-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.796110 4661 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/12b98504-ea2f-4b12-a55e-5c3f514817a4-tls-assets\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.812230 4661 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME 
capability not set. Skipping UnmountDevice... Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.812420 4661 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-70227665-af75-4dfe-9648-95f31cf3d818" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818") on node "crc" Oct 01 05:46:22 crc kubenswrapper[4661]: I1001 05:46:22.900766 4661 reconciler_common.go:293] "Volume detached for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.526250 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"3276afc221770d65b1d9e002f7202bca388d6e17eb1629f83a000abeae0b20c1"} Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.531505 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"12b98504-ea2f-4b12-a55e-5c3f514817a4","Type":"ContainerDied","Data":"b86144ef7abcaf8537b65cd4eb8a1e36032edca0fa402f3485146f4b9e29a262"} Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.531570 4661 scope.go:117] "RemoveContainer" containerID="38229022bb87b1eb9eba4fc0e420626aba9e306b9e1c284a511514dde249b8aa" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.531612 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.583429 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.591544 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.624199 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.624874 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="init-config-reloader" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.624966 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="init-config-reloader" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.625595 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="prometheus" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.625768 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="prometheus" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.625856 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cf476f-5124-495f-b4b8-4899a31e4f63" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.625925 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cf476f-5124-495f-b4b8-4899a31e4f63" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.626497 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="thanos-sidecar" Oct 01 05:46:23 crc 
kubenswrapper[4661]: I1001 05:46:23.626679 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="thanos-sidecar" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.626779 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d068347-68e4-4719-8e0a-6a514729f385" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.626858 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d068347-68e4-4719-8e0a-6a514729f385" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.626941 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="init" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.627062 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="init" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.627486 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="dnsmasq-dns" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.627577 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="dnsmasq-dns" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.627754 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="config-reloader" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.627834 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="config-reloader" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.627939 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c08e846-393c-4f97-904d-f0a5c89a33e5" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628039 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c08e846-393c-4f97-904d-f0a5c89a33e5" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: E1001 05:46:23.628114 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a6cf6dd-8146-4582-a2be-3525b97f43fa" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628189 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a6cf6dd-8146-4582-a2be-3525b97f43fa" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628485 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="config-reloader" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628588 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="56cf476f-5124-495f-b4b8-4899a31e4f63" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628790 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="677c3be8-2587-44d1-8545-65238de20248" containerName="dnsmasq-dns" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628891 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="thanos-sidecar" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.628972 4661 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" containerName="prometheus" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.629049 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c08e846-393c-4f97-904d-f0a5c89a33e5" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.629125 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d068347-68e4-4719-8e0a-6a514729f385" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.629202 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a6cf6dd-8146-4582-a2be-3525b97f43fa" containerName="mariadb-database-create" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.644946 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.648363 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.649280 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.649707 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.649752 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rxwq4" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.659188 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.659574 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.660001 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.660458 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713443 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713499 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713528 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " 
pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713558 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713576 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713602 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713791 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713808 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbtdv\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713858 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713881 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.713918 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " 
pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.767843 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12b98504-ea2f-4b12-a55e-5c3f514817a4" path="/var/lib/kubelet/pods/12b98504-ea2f-4b12-a55e-5c3f514817a4/volumes" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.815173 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.815411 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.815530 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.816274 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.816609 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817442 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817547 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817694 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817778 
4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbtdv\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.817960 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.816780 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.821553 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.821568 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.824378 4661 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.825126 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e51bf99238560523215dee685b077c67fdd0498f27e19b8c5ba6a080034e1ca7/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.824894 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.827291 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.828151 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.828493 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.828893 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.839069 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.841545 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbtdv\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.869056 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:23 crc kubenswrapper[4661]: I1001 05:46:23.973913 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Oct 01 05:46:24 crc kubenswrapper[4661]: I1001 05:46:24.086066 4661 scope.go:117] "RemoveContainer" containerID="e88da81919f1cf59bd5361fe51f9e5e37ba1e82f639bf34da7f45995d01cb087"
Oct 01 05:46:24 crc kubenswrapper[4661]: I1001 05:46:24.118310 4661 scope.go:117] "RemoveContainer" containerID="b84b9721d6dd02868e54896137a5dbea2f3a8a9dee5a151bf3687af9dab86b7a"
Oct 01 05:46:24 crc kubenswrapper[4661]: I1001 05:46:24.197522 4661 scope.go:117] "RemoveContainer" containerID="2a44bb32803e50fb7c3b62fe4aa9440b3daaec63859584cbee4f9639a0a5151d"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.792768 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Oct 01 05:46:27 crc kubenswrapper[4661]: W1001 05:46:24.810193 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dde6251_f26b_4291_931e_30fce08578fd.slice/crio-10d6b719d61b5ab549f31fbb616c89cb907d163fcb8468afc01c25bdc17016d8 WatchSource:0}: Error finding container 10d6b719d61b5ab549f31fbb616c89cb907d163fcb8468afc01c25bdc17016d8: Status 404 returned error can't find the container with id 10d6b719d61b5ab549f31fbb616c89cb907d163fcb8468afc01c25bdc17016d8
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.877226 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8557-account-create-xgf4w"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.878539 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.880504 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.884959 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8557-account-create-xgf4w"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:24.940859 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm47l\" (UniqueName: \"kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l\") pod \"keystone-8557-account-create-xgf4w\" (UID: \"d4ee810c-d045-46b4-bcb8-1c2490123d06\") " pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:25.041975 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm47l\" (UniqueName: \"kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l\") pod \"keystone-8557-account-create-xgf4w\" (UID: \"d4ee810c-d045-46b4-bcb8-1c2490123d06\") " pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:25.070269 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm47l\" (UniqueName: \"kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l\") pod \"keystone-8557-account-create-xgf4w\" (UID: \"d4ee810c-d045-46b4-bcb8-1c2490123d06\") " pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:25.226024 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:25.561986 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerStarted","Data":"10d6b719d61b5ab549f31fbb616c89cb907d163fcb8468afc01c25bdc17016d8"}
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.172760 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mplg4" podUID="c5001332-068e-46eb-a21c-25e29832baab" containerName="ovn-controller" probeResult="failure" output=<
Oct 01 05:46:27 crc kubenswrapper[4661]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 01 05:46:27 crc kubenswrapper[4661]: >
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.482455 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wmxcl"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.484351 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wmxcl"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.576565 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e08e6861-2a19-4c40-8ed3-aeb2662d75bd","Type":"ContainerStarted","Data":"9fc4026fb4058607c65864a4816eb80dcaec771be02bfbca3509f9e8f8876aed"}
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.629234 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=23.67204213 podStartE2EDuration="29.629218429s" podCreationTimestamp="2025-10-01 05:45:57 +0000 UTC" firstStartedPulling="2025-10-01 05:46:15.071809825 +0000 UTC m=+1024.009788439" lastFinishedPulling="2025-10-01 05:46:21.028986124 +0000 UTC m=+1029.966964738" observedRunningTime="2025-10-01 05:46:26.620245092 +0000 UTC m=+1035.558223726" watchObservedRunningTime="2025-10-01 05:46:26.629218429 +0000 UTC m=+1035.567197043"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.751425 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mplg4-config-dlhk9"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.756148 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.774344 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.810813 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mplg4-config-dlhk9"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878047 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c4px\" (UniqueName: \"kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878198 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878262 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878325 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878382 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.878445 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.897307 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.898677 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.902240 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.912481 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979735 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds7cp\" (UniqueName: \"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979798 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979841 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979868 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979890 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979941 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.979992 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980012 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980044 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980076 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980099 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c4px\" (UniqueName: \"kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980133 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980168 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980363 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.980920 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.982323 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:26.997802 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c4px\" (UniqueName: \"kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px\") pod \"ovn-controller-mplg4-config-dlhk9\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") " pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.044590 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-8f3c-account-create-55d7t"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.045726 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.047750 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.050033 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-8f3c-account-create-55d7t"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081810 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds7cp\" (UniqueName: \"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081842 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gms84\" (UniqueName: \"kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84\") pod \"watcher-8f3c-account-create-55d7t\" (UID: \"c05131a9-6d9f-486d-8f8d-6667e3e65506\") " pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081875 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081894 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081953 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.081982 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.082726 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.083210 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.083933 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.084398 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.084889 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.097968 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds7cp\" (UniqueName: \"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp\") pod \"dnsmasq-dns-55b99bf79c-xpfjw\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.154611 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.183602 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gms84\" (UniqueName: \"kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84\") pod \"watcher-8f3c-account-create-55d7t\" (UID: \"c05131a9-6d9f-486d-8f8d-6667e3e65506\") " pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.202393 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gms84\" (UniqueName: \"kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84\") pod \"watcher-8f3c-account-create-55d7t\" (UID: \"c05131a9-6d9f-486d-8f8d-6667e3e65506\") " pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.217425 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.363101 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.573729 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8557-account-create-xgf4w"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.670489 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mplg4-config-dlhk9"]
Oct 01 05:46:27 crc kubenswrapper[4661]: I1001 05:46:27.776421 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"]
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.005431 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-8f3c-account-create-55d7t"]
Oct 01 05:46:28 crc kubenswrapper[4661]: W1001 05:46:28.018035 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc05131a9_6d9f_486d_8f8d_6667e3e65506.slice/crio-23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d WatchSource:0}: Error finding container 23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d: Status 404 returned error can't find the container with id 23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.612302 4661 generic.go:334] "Generic (PLEG): container finished" podID="78280332-bedc-4f17-a2df-140950b99d79" containerID="33c88f372e0330b3ed7784e5237e5ef58e8d698e6604b7c3dee3210e757d9427" exitCode=0
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.612848 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mplg4-config-dlhk9" event={"ID":"78280332-bedc-4f17-a2df-140950b99d79","Type":"ContainerDied","Data":"33c88f372e0330b3ed7784e5237e5ef58e8d698e6604b7c3dee3210e757d9427"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.612911 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mplg4-config-dlhk9" event={"ID":"78280332-bedc-4f17-a2df-140950b99d79","Type":"ContainerStarted","Data":"950638d4463b7c2f933576b3cec738f94f89f5e6dd6f13691d391d24ee6ebde1"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.615371 4661 generic.go:334] "Generic (PLEG): container finished" podID="c05131a9-6d9f-486d-8f8d-6667e3e65506" containerID="2bac3f72ee1aac17e57ba63180c86a042583a1903d0259a8229234b58774ecc8" exitCode=0
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.615500 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8f3c-account-create-55d7t" event={"ID":"c05131a9-6d9f-486d-8f8d-6667e3e65506","Type":"ContainerDied","Data":"2bac3f72ee1aac17e57ba63180c86a042583a1903d0259a8229234b58774ecc8"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.615570 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8f3c-account-create-55d7t" event={"ID":"c05131a9-6d9f-486d-8f8d-6667e3e65506","Type":"ContainerStarted","Data":"23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.621969 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerStarted","Data":"0306c5f0e811f1e2b0cf05468bc2bd80e46ab8aa58a9063b21bb3aae9898a509"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.632887 4661 generic.go:334] "Generic (PLEG): container finished" podID="d4ee810c-d045-46b4-bcb8-1c2490123d06" containerID="689a4089c94bdf3f4f4224deb013107deb48b0e7125eb729cb6c30e5a31c1f61" exitCode=0
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.633017 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8557-account-create-xgf4w" event={"ID":"d4ee810c-d045-46b4-bcb8-1c2490123d06","Type":"ContainerDied","Data":"689a4089c94bdf3f4f4224deb013107deb48b0e7125eb729cb6c30e5a31c1f61"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.633125 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8557-account-create-xgf4w" event={"ID":"d4ee810c-d045-46b4-bcb8-1c2490123d06","Type":"ContainerStarted","Data":"3f094cc39b36337d17feab024567678f11da1e88c6830f3b58a902a97856a2c6"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.642732 4661 generic.go:334] "Generic (PLEG): container finished" podID="29234de0-146d-457e-9b25-f0d8804cf06e" containerID="3ac5f85e88d4e02d7e739333fea968d1df792540624cbf881e0ad2d3a5e70bbc" exitCode=0
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.642804 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" event={"ID":"29234de0-146d-457e-9b25-f0d8804cf06e","Type":"ContainerDied","Data":"3ac5f85e88d4e02d7e739333fea968d1df792540624cbf881e0ad2d3a5e70bbc"}
Oct 01 05:46:28 crc kubenswrapper[4661]: I1001 05:46:28.642847 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" event={"ID":"29234de0-146d-457e-9b25-f0d8804cf06e","Type":"ContainerStarted","Data":"96d8e78f6354368eb87106f17f6c409d9630d2ee43994c04bf3b93f3a4708732"}
Oct 01 05:46:29 crc kubenswrapper[4661]: I1001 05:46:29.656991 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" event={"ID":"29234de0-146d-457e-9b25-f0d8804cf06e","Type":"ContainerStarted","Data":"138b9d7cb946db5cddc0facb6398bc5a4bafc87abc0b888c399b94904e85b0fb"}
Oct 01 05:46:29 crc kubenswrapper[4661]: I1001 05:46:29.658005 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:29 crc kubenswrapper[4661]: I1001 05:46:29.699205 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" podStartSLOduration=3.699179324 podStartE2EDuration="3.699179324s" podCreationTimestamp="2025-10-01 05:46:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:29.694180056 +0000 UTC m=+1038.632158710" watchObservedRunningTime="2025-10-01 05:46:29.699179324 +0000 UTC m=+1038.637157968"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.188298 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.296812 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.306554 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.345711 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm47l\" (UniqueName: \"kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l\") pod \"d4ee810c-d045-46b4-bcb8-1c2490123d06\" (UID: \"d4ee810c-d045-46b4-bcb8-1c2490123d06\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.374527 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l" (OuterVolumeSpecName: "kube-api-access-zm47l") pod "d4ee810c-d045-46b4-bcb8-1c2490123d06" (UID: "d4ee810c-d045-46b4-bcb8-1c2490123d06"). InnerVolumeSpecName "kube-api-access-zm47l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.447458 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.447527 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.447536 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.447878 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448007 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448034 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c4px\" (UniqueName: \"kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448060 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gms84\" (UniqueName: \"kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84\") pod \"c05131a9-6d9f-486d-8f8d-6667e3e65506\" (UID: \"c05131a9-6d9f-486d-8f8d-6667e3e65506\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448088 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn\") pod \"78280332-bedc-4f17-a2df-140950b99d79\" (UID: \"78280332-bedc-4f17-a2df-140950b99d79\") "
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448150 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run" (OuterVolumeSpecName: "var-run") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448330 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448523 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448659 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts" (OuterVolumeSpecName: "scripts") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448701 4661 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448717 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm47l\" (UniqueName: \"kubernetes.io/projected/d4ee810c-d045-46b4-bcb8-1c2490123d06-kube-api-access-zm47l\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448726 4661 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448734 4661 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78280332-bedc-4f17-a2df-140950b99d79-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.448743 4661 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-additional-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.450794 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84" (OuterVolumeSpecName: "kube-api-access-gms84") pod "c05131a9-6d9f-486d-8f8d-6667e3e65506" (UID: "c05131a9-6d9f-486d-8f8d-6667e3e65506"). InnerVolumeSpecName "kube-api-access-gms84". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.451865 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px" (OuterVolumeSpecName: "kube-api-access-6c4px") pod "78280332-bedc-4f17-a2df-140950b99d79" (UID: "78280332-bedc-4f17-a2df-140950b99d79"). InnerVolumeSpecName "kube-api-access-6c4px". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.551188 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c4px\" (UniqueName: \"kubernetes.io/projected/78280332-bedc-4f17-a2df-140950b99d79-kube-api-access-6c4px\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.551590 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gms84\" (UniqueName: \"kubernetes.io/projected/c05131a9-6d9f-486d-8f8d-6667e3e65506-kube-api-access-gms84\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.551757 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78280332-bedc-4f17-a2df-140950b99d79-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.670341 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mplg4-config-dlhk9" event={"ID":"78280332-bedc-4f17-a2df-140950b99d79","Type":"ContainerDied","Data":"950638d4463b7c2f933576b3cec738f94f89f5e6dd6f13691d391d24ee6ebde1"}
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.670391 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="950638d4463b7c2f933576b3cec738f94f89f5e6dd6f13691d391d24ee6ebde1"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.671182 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mplg4-config-dlhk9"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.673069 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8f3c-account-create-55d7t" event={"ID":"c05131a9-6d9f-486d-8f8d-6667e3e65506","Type":"ContainerDied","Data":"23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d"}
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.673102 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23ebbf795e6c3c0484609b1d317ff84f28b901bf0a5c772a18c86ecb9f325f2d"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.673133 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8f3c-account-create-55d7t"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.676020 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8557-account-create-xgf4w" event={"ID":"d4ee810c-d045-46b4-bcb8-1c2490123d06","Type":"ContainerDied","Data":"3f094cc39b36337d17feab024567678f11da1e88c6830f3b58a902a97856a2c6"}
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.676084 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f094cc39b36337d17feab024567678f11da1e88c6830f3b58a902a97856a2c6"
Oct 01 05:46:30 crc kubenswrapper[4661]: I1001 05:46:30.676044 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8557-account-create-xgf4w"
Oct 01 05:46:31 crc kubenswrapper[4661]: I1001 05:46:31.186381 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-mplg4"
Oct 01 05:46:31 crc kubenswrapper[4661]: I1001 05:46:31.448367 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-mplg4-config-dlhk9"]
Oct 01 05:46:31 crc kubenswrapper[4661]: I1001 05:46:31.456291 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-mplg4-config-dlhk9"]
Oct 01 05:46:31 crc kubenswrapper[4661]: I1001 05:46:31.768624 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78280332-bedc-4f17-a2df-140950b99d79" path="/var/lib/kubelet/pods/78280332-bedc-4f17-a2df-140950b99d79/volumes"
Oct 01 05:46:34 crc kubenswrapper[4661]: I1001 05:46:34.719551 4661 generic.go:334] "Generic (PLEG): container finished" podID="31336b4a-1953-44ab-b229-401a3a3ac031" containerID="e4b706e5b85bcc690eca0586716d67b71f05d9134448ccee9f89b6082624960a" exitCode=0
Oct 01 05:46:34 crc kubenswrapper[4661]: I1001 05:46:34.719774 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerDied","Data":"e4b706e5b85bcc690eca0586716d67b71f05d9134448ccee9f89b6082624960a"}
Oct 01 05:46:34 crc kubenswrapper[4661]: I1001 05:46:34.724642 4661 generic.go:334] "Generic (PLEG): container finished" podID="1658ccd7-4bae-45bf-aa67-fc5c075a417c" containerID="85263945289c994cc57d153cf978cfa98d5db684e64dc605627246425cabbaee" exitCode=0
Oct 01 05:46:34 crc kubenswrapper[4661]: I1001 05:46:34.724685 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"1658ccd7-4bae-45bf-aa67-fc5c075a417c","Type":"ContainerDied","Data":"85263945289c994cc57d153cf978cfa98d5db684e64dc605627246425cabbaee"}
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.099367 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c0e5-account-create-nx68n"]
Oct 01 05:46:35 crc kubenswrapper[4661]: E1001 05:46:35.100128 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78280332-bedc-4f17-a2df-140950b99d79" containerName="ovn-config"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100150 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="78280332-bedc-4f17-a2df-140950b99d79" containerName="ovn-config"
Oct 01 05:46:35 crc kubenswrapper[4661]: E1001 05:46:35.100165 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ee810c-d045-46b4-bcb8-1c2490123d06" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100174 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ee810c-d045-46b4-bcb8-1c2490123d06" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: E1001 05:46:35.100187 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05131a9-6d9f-486d-8f8d-6667e3e65506" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100195 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05131a9-6d9f-486d-8f8d-6667e3e65506" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100405 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ee810c-d045-46b4-bcb8-1c2490123d06" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100430 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05131a9-6d9f-486d-8f8d-6667e3e65506" containerName="mariadb-account-create"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.100444 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="78280332-bedc-4f17-a2df-140950b99d79" containerName="ovn-config"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.101138 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.103312 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.121983 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c0e5-account-create-nx68n"]
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.249668 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4ldf\" (UniqueName: \"kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf\") pod \"placement-c0e5-account-create-nx68n\" (UID: \"fc52c0b9-60dc-4c50-9022-d993371171ec\") " pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.350570 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4ldf\" (UniqueName: \"kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf\") pod \"placement-c0e5-account-create-nx68n\" (UID: \"fc52c0b9-60dc-4c50-9022-d993371171ec\") " pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.372749 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4ldf\" (UniqueName: \"kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf\") pod \"placement-c0e5-account-create-nx68n\" (UID: \"fc52c0b9-60dc-4c50-9022-d993371171ec\") " pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.417756 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.534225 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-ca83-account-create-wxd5d"]
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.535382 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.541518 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.555198 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ca83-account-create-wxd5d"]
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.661386 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb5m5\" (UniqueName: \"kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5\") pod \"glance-ca83-account-create-wxd5d\" (UID: \"f35ff2fe-28c0-4815-a674-7b063a959b28\") " pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.738145 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerStarted","Data":"2ada95fe4bda404b5dd58857619ad25cdc3c3801d25f51ee4e55f0ba0fc2deea"}
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.739101 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.744697 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"1658ccd7-4bae-45bf-aa67-fc5c075a417c","Type":"ContainerStarted","Data":"5de4c854a4ba7f0f1c344185deeef023856ea1800f8f48ec69eddcfeb0ec8d74"}
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.745160 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.764599 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb5m5\" (UniqueName: \"kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5\") pod \"glance-ca83-account-create-wxd5d\" (UID: \"f35ff2fe-28c0-4815-a674-7b063a959b28\") " pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.775577 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.284389901 podStartE2EDuration="1m26.775559724s" podCreationTimestamp="2025-10-01 05:45:09 +0000 UTC" firstStartedPulling="2025-10-01 05:45:11.471956029 +0000 UTC m=+960.409934643" lastFinishedPulling="2025-10-01 05:46:00.963125852 +0000 UTC m=+1009.901104466" observedRunningTime="2025-10-01 05:46:35.768739246 +0000 UTC m=+1044.706717860" watchObservedRunningTime="2025-10-01 05:46:35.775559724 +0000 UTC m=+1044.713538338"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.797475 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb5m5\" (UniqueName: \"kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5\") pod \"glance-ca83-account-create-wxd5d\" (UID: \"f35ff2fe-28c0-4815-a674-7b063a959b28\") " pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.861959 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.938082 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=36.965738497 podStartE2EDuration="1m25.938065236s" podCreationTimestamp="2025-10-01 05:45:10 +0000 UTC" firstStartedPulling="2025-10-01 05:45:11.989785475 +0000 UTC m=+960.927764089" lastFinishedPulling="2025-10-01 05:46:00.962112214 +0000 UTC m=+1009.900090828" observedRunningTime="2025-10-01 05:46:35.831977417 +0000 UTC m=+1044.769956031" watchObservedRunningTime="2025-10-01 05:46:35.938065236 +0000 UTC m=+1044.876043850"
Oct 01 05:46:35 crc kubenswrapper[4661]: I1001 05:46:35.943887 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c0e5-account-create-nx68n"]
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.343266 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ca83-account-create-wxd5d"]
Oct 01 05:46:36 crc kubenswrapper[4661]: W1001 05:46:36.346730 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35ff2fe_28c0_4815_a674_7b063a959b28.slice/crio-066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e WatchSource:0}: Error finding container 066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e: Status 404 returned error can't find the container with id 066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.760891 4661 generic.go:334] "Generic (PLEG): container finished" podID="fc52c0b9-60dc-4c50-9022-d993371171ec" containerID="e972e3b1fb007ffaa726df923cee4704571fd53fe9696a0072257c80ed170e09" exitCode=0
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.761247 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c0e5-account-create-nx68n" event={"ID":"fc52c0b9-60dc-4c50-9022-d993371171ec","Type":"ContainerDied","Data":"e972e3b1fb007ffaa726df923cee4704571fd53fe9696a0072257c80ed170e09"}
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.761279 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c0e5-account-create-nx68n" event={"ID":"fc52c0b9-60dc-4c50-9022-d993371171ec","Type":"ContainerStarted","Data":"c0adeafc4b000f4539de7ffbf91e2e6b56bdfc6964d96978f3d0a8d5fbad3652"}
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.764323 4661 generic.go:334] "Generic (PLEG): container finished" podID="f35ff2fe-28c0-4815-a674-7b063a959b28" containerID="dad4e7ec501be993ab367ab7f124033437f83dc1184c88ebdbc714b1dd4d44ff" exitCode=0
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.765269 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ca83-account-create-wxd5d" event={"ID":"f35ff2fe-28c0-4815-a674-7b063a959b28","Type":"ContainerDied","Data":"dad4e7ec501be993ab367ab7f124033437f83dc1184c88ebdbc714b1dd4d44ff"}
Oct 01 05:46:36 crc kubenswrapper[4661]: I1001 05:46:36.765317 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ca83-account-create-wxd5d" event={"ID":"f35ff2fe-28c0-4815-a674-7b063a959b28","Type":"ContainerStarted","Data":"066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e"}
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.226018 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw"
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.306919 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"]
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.307203 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="dnsmasq-dns" containerID="cri-o://62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882" gracePeriod=10
Oct 01 05:46:37 crc kubenswrapper[4661]: E1001 05:46:37.394356 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ba34d64_f5f8_4543_91ad_deddaaa978fc.slice/crio-62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882.scope\": RecentStats: unable to find data in memory cache]"
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.774274 4661 generic.go:334] "Generic (PLEG): container finished" podID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerID="62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882" exitCode=0
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.774433 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" event={"ID":"0ba34d64-f5f8-4543-91ad-deddaaa978fc","Type":"ContainerDied","Data":"62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882"}
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.777210 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dde6251-f26b-4291-931e-30fce08578fd" containerID="0306c5f0e811f1e2b0cf05468bc2bd80e46ab8aa58a9063b21bb3aae9898a509" exitCode=0
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.777290 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerDied","Data":"0306c5f0e811f1e2b0cf05468bc2bd80e46ab8aa58a9063b21bb3aae9898a509"}
Oct 01 05:46:37 crc kubenswrapper[4661]: I1001 05:46:37.953422 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.039946 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc\") pod \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.040305 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb\") pod \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.040408 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb\") pod \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.040430 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzngg\" (UniqueName: \"kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg\") pod \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.040519 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config\") pod \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\" (UID: \"0ba34d64-f5f8-4543-91ad-deddaaa978fc\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.057940 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg" (OuterVolumeSpecName: "kube-api-access-wzngg") pod "0ba34d64-f5f8-4543-91ad-deddaaa978fc" (UID: "0ba34d64-f5f8-4543-91ad-deddaaa978fc"). InnerVolumeSpecName "kube-api-access-wzngg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.102526 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0ba34d64-f5f8-4543-91ad-deddaaa978fc" (UID: "0ba34d64-f5f8-4543-91ad-deddaaa978fc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.106227 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0ba34d64-f5f8-4543-91ad-deddaaa978fc" (UID: "0ba34d64-f5f8-4543-91ad-deddaaa978fc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.149624 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.149733 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.149810 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzngg\" (UniqueName: \"kubernetes.io/projected/0ba34d64-f5f8-4543-91ad-deddaaa978fc-kube-api-access-wzngg\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.153008 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0ba34d64-f5f8-4543-91ad-deddaaa978fc" (UID: "0ba34d64-f5f8-4543-91ad-deddaaa978fc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.155445 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config" (OuterVolumeSpecName: "config") pod "0ba34d64-f5f8-4543-91ad-deddaaa978fc" (UID: "0ba34d64-f5f8-4543-91ad-deddaaa978fc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.216800 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ca83-account-create-wxd5d"
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.227324 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c0e5-account-create-nx68n"
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.251237 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-config\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.251261 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ba34d64-f5f8-4543-91ad-deddaaa978fc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.351970 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4ldf\" (UniqueName: \"kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf\") pod \"fc52c0b9-60dc-4c50-9022-d993371171ec\" (UID: \"fc52c0b9-60dc-4c50-9022-d993371171ec\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.352036 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb5m5\" (UniqueName: \"kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5\") pod \"f35ff2fe-28c0-4815-a674-7b063a959b28\" (UID: \"f35ff2fe-28c0-4815-a674-7b063a959b28\") "
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.355157 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5" (OuterVolumeSpecName: "kube-api-access-lb5m5") pod "f35ff2fe-28c0-4815-a674-7b063a959b28" (UID: "f35ff2fe-28c0-4815-a674-7b063a959b28"). InnerVolumeSpecName "kube-api-access-lb5m5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.355781 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf" (OuterVolumeSpecName: "kube-api-access-x4ldf") pod "fc52c0b9-60dc-4c50-9022-d993371171ec" (UID: "fc52c0b9-60dc-4c50-9022-d993371171ec"). InnerVolumeSpecName "kube-api-access-x4ldf".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.454169 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4ldf\" (UniqueName: \"kubernetes.io/projected/fc52c0b9-60dc-4c50-9022-d993371171ec-kube-api-access-x4ldf\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.454220 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb5m5\" (UniqueName: \"kubernetes.io/projected/f35ff2fe-28c0-4815-a674-7b063a959b28-kube-api-access-lb5m5\") on node \"crc\" DevicePath \"\"" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.788321 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerStarted","Data":"909afc94c98bc7d4a92f5cc7e00b6782ab53d5ed51a49b88f48b2132905022c2"} Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.796367 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c0e5-account-create-nx68n" event={"ID":"fc52c0b9-60dc-4c50-9022-d993371171ec","Type":"ContainerDied","Data":"c0adeafc4b000f4539de7ffbf91e2e6b56bdfc6964d96978f3d0a8d5fbad3652"} Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.796420 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0adeafc4b000f4539de7ffbf91e2e6b56bdfc6964d96978f3d0a8d5fbad3652" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.796538 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c0e5-account-create-nx68n" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.805348 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ca83-account-create-wxd5d" event={"ID":"f35ff2fe-28c0-4815-a674-7b063a959b28","Type":"ContainerDied","Data":"066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e"} Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.805398 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="066f93f8c9837d878b0d37e834b61948d1377b41947b2b4fd1846edf215d7e3e" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.805459 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ca83-account-create-wxd5d" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.809790 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" event={"ID":"0ba34d64-f5f8-4543-91ad-deddaaa978fc","Type":"ContainerDied","Data":"fdb966fcf1659485b92c4dfb2822ad1500adf43e605d5ce513d627851382c53a"} Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.809859 4661 scope.go:117] "RemoveContainer" containerID="62b3b78dff97f3c1c36180dbd4b71ae2f1dfac2dd35b4f4c53dab3136bdb2882" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.809884 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-xbhc8" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.889007 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"] Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.895811 4661 scope.go:117] "RemoveContainer" containerID="d79757566c13ac688b66c06139663ec9b3375c5c6b3eb29f4ee239e83ac8781a" Oct 01 05:46:38 crc kubenswrapper[4661]: I1001 05:46:38.896842 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-xbhc8"] Oct 01 05:46:39 crc kubenswrapper[4661]: I1001 05:46:39.767211 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" path="/var/lib/kubelet/pods/0ba34d64-f5f8-4543-91ad-deddaaa978fc/volumes" Oct 01 05:46:39 crc kubenswrapper[4661]: I1001 05:46:39.828440 4661 generic.go:334] "Generic (PLEG): container finished" podID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerID="039dfc87fb55da52d83c66c05a25ea5859a3d7bdb5cf40fe94cb117e3a2ca1d2" exitCode=0 Oct 01 05:46:39 crc kubenswrapper[4661]: I1001 05:46:39.828503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerDied","Data":"039dfc87fb55da52d83c66c05a25ea5859a3d7bdb5cf40fe94cb117e3a2ca1d2"} Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778013 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-h5lwt"] Oct 01 05:46:40 crc kubenswrapper[4661]: E1001 05:46:40.778578 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="init" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778589 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="init" Oct 01 05:46:40 crc kubenswrapper[4661]: E1001 05:46:40.778600 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="dnsmasq-dns" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778605 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="dnsmasq-dns" Oct 01 05:46:40 crc kubenswrapper[4661]: E1001 05:46:40.778645 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc52c0b9-60dc-4c50-9022-d993371171ec" containerName="mariadb-account-create" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778651 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc52c0b9-60dc-4c50-9022-d993371171ec" containerName="mariadb-account-create" Oct 01 05:46:40 crc kubenswrapper[4661]: E1001 05:46:40.778675 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35ff2fe-28c0-4815-a674-7b063a959b28" containerName="mariadb-account-create" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778681 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35ff2fe-28c0-4815-a674-7b063a959b28" containerName="mariadb-account-create" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778818 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ba34d64-f5f8-4543-91ad-deddaaa978fc" containerName="dnsmasq-dns" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.778839 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35ff2fe-28c0-4815-a674-7b063a959b28" containerName="mariadb-account-create" Oct 01 05:46:40 crc 
kubenswrapper[4661]: I1001 05:46:40.778854 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc52c0b9-60dc-4c50-9022-d993371171ec" containerName="mariadb-account-create" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.779365 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.782130 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.788478 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-h5lwt"] Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.802424 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f48gr" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.847087 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerStarted","Data":"cee8f1a11352270182d3bca4ac8feb700d7870c9f94e70f1687b9bafc2489739"} Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.848010 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.868260 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371944.986536 podStartE2EDuration="1m31.86823924s" podCreationTimestamp="2025-10-01 05:45:09 +0000 UTC" firstStartedPulling="2025-10-01 05:45:11.741000207 +0000 UTC m=+960.678978821" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:40.86608266 +0000 UTC m=+1049.804061274" watchObservedRunningTime="2025-10-01 05:46:40.86823924 +0000 UTC m=+1049.806217854" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.899202 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.899342 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.899383 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljx2f\" (UniqueName: \"kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:40 crc kubenswrapper[4661]: I1001 05:46:40.899684 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 
05:46:41.001444 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.001500 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.001527 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljx2f\" (UniqueName: \"kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.001565 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.006830 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.007134 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.007342 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.029773 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljx2f\" (UniqueName: \"kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f\") pod \"glance-db-sync-h5lwt\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.103042 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-h5lwt" Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.678290 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-h5lwt"] Oct 01 05:46:41 crc kubenswrapper[4661]: W1001 05:46:41.686652 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9ff8ea6_9373_4cb5_941b_ac46d4fac6d6.slice/crio-190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422 WatchSource:0}: Error finding container 190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422: Status 404 returned error can't find the container with id 190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422 Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.858413 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerStarted","Data":"81134aefa5f7c53c22ec5c1fa244c34652df840a596107c5f3b37806c3cb6896"} Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.858449 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerStarted","Data":"c29be373f937e2652d05be92770fabdb937292978e266b1bb7e6206c7359ede2"} Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.861313 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h5lwt" event={"ID":"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6","Type":"ContainerStarted","Data":"190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422"} Oct 01 05:46:41 crc kubenswrapper[4661]: I1001 05:46:41.901102 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=18.901083006 podStartE2EDuration="18.901083006s" podCreationTimestamp="2025-10-01 05:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:46:41.892579982 +0000 UTC m=+1050.830558606" watchObservedRunningTime="2025-10-01 05:46:41.901083006 +0000 UTC m=+1050.839061640" Oct 01 05:46:43 crc kubenswrapper[4661]: I1001 05:46:43.974081 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:50 crc kubenswrapper[4661]: I1001 05:46:50.949004 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:46:51 crc kubenswrapper[4661]: I1001 05:46:51.250613 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: connect: connection refused" Oct 01 05:46:51 crc kubenswrapper[4661]: I1001 05:46:51.555850 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Oct 01 05:46:53 crc kubenswrapper[4661]: I1001 05:46:53.974348 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:53 crc kubenswrapper[4661]: I1001 05:46:53.982717 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Oct 01 05:46:54 crc kubenswrapper[4661]: I1001 05:46:54.976719 4661 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Oct 01 05:47:00 crc kubenswrapper[4661]: I1001 05:47:00.029922 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h5lwt" event={"ID":"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6","Type":"ContainerStarted","Data":"258f546d48f21daee8f76b5a8f2a61544865a69cddc4f8fe22e431ef7c3e992b"} Oct 01 05:47:00 crc kubenswrapper[4661]: I1001 05:47:00.062568 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-h5lwt" podStartSLOduration=2.47643582 podStartE2EDuration="20.062542922s" podCreationTimestamp="2025-10-01 05:46:40 +0000 UTC" firstStartedPulling="2025-10-01 05:46:41.689998216 +0000 UTC m=+1050.627976830" lastFinishedPulling="2025-10-01 05:46:59.276105288 +0000 UTC m=+1068.214083932" observedRunningTime="2025-10-01 05:47:00.052594899 +0000 UTC m=+1068.990573553" watchObservedRunningTime="2025-10-01 05:47:00.062542922 +0000 UTC m=+1069.000521576" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.250760 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.694897 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-jqk5r"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.696272 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.705402 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jqk5r"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.793311 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-x8c5b"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.794280 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.802388 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-x8c5b"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.844901 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-st9fk"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.846686 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.849715 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.849966 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-d2cdd" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.862097 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl4sb\" (UniqueName: \"kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb\") pod \"barbican-db-create-jqk5r\" (UID: \"da61731a-e57a-4977-b781-2742e48948bd\") " pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.866457 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-st9fk"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.901584 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-4vbbg"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.902533 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.909261 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4vbbg"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.956781 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qgcfj"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.958353 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.960395 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.960778 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5nw2h" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.960926 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.961992 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963068 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl4sb\" (UniqueName: \"kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb\") pod \"barbican-db-create-jqk5r\" (UID: \"da61731a-e57a-4977-b781-2742e48948bd\") " pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963116 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwdng\" (UniqueName: \"kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963152 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfp2l\" (UniqueName: \"kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l\") pod 
\"cinder-db-create-x8c5b\" (UID: \"450dd430-3c14-413f-ba17-a467a882deb5\") " pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963177 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963200 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.963260 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.977576 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qgcfj"] Oct 01 05:47:01 crc kubenswrapper[4661]: I1001 05:47:01.988868 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl4sb\" (UniqueName: \"kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb\") pod \"barbican-db-create-jqk5r\" (UID: \"da61731a-e57a-4977-b781-2742e48948bd\") " pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.014302 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.064912 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrd4j\" (UniqueName: \"kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j\") pod \"neutron-db-create-4vbbg\" (UID: \"43bc7be9-b86f-4811-ae9f-f8c1898910e3\") " pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.064973 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwdng\" (UniqueName: \"kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065003 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gbrx\" (UniqueName: \"kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065033 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfp2l\" (UniqueName: \"kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l\") pod \"cinder-db-create-x8c5b\" (UID: \"450dd430-3c14-413f-ba17-a467a882deb5\") " pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065057 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065078 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065120 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065173 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.065199 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " 
pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.071259 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.072150 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.073059 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.083575 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfp2l\" (UniqueName: \"kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l\") pod \"cinder-db-create-x8c5b\" (UID: \"450dd430-3c14-413f-ba17-a467a882deb5\") " pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.089447 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwdng\" (UniqueName: \"kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng\") pod \"watcher-db-sync-st9fk\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.131218 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.167307 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrd4j\" (UniqueName: \"kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j\") pod \"neutron-db-create-4vbbg\" (UID: \"43bc7be9-b86f-4811-ae9f-f8c1898910e3\") " pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.167354 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gbrx\" (UniqueName: \"kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.167387 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.167449 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.170959 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.171690 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.187828 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gbrx\" (UniqueName: \"kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.194626 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrd4j\" (UniqueName: \"kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j\") pod \"neutron-db-create-4vbbg\" (UID: \"43bc7be9-b86f-4811-ae9f-f8c1898910e3\") " pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.208469 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data\") pod \"keystone-db-sync-qgcfj\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.224201 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.281076 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.506941 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jqk5r"] Oct 01 05:47:02 crc kubenswrapper[4661]: W1001 05:47:02.512378 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda61731a_e57a_4977_b781_2742e48948bd.slice/crio-78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175 WatchSource:0}: Error finding container 78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175: Status 404 returned error can't find the container with id 78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175 Oct 01 05:47:02 crc kubenswrapper[4661]: W1001 05:47:02.812219 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod450dd430_3c14_413f_ba17_a467a882deb5.slice/crio-3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d WatchSource:0}: Error finding container 3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d: Status 404 returned error can't find the container with id 3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.829862 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-x8c5b"] Oct 01 05:47:02 crc kubenswrapper[4661]: I1001 05:47:02.907521 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-st9fk"] Oct 01 05:47:02 crc kubenswrapper[4661]: W1001 05:47:02.937509 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd52ab365_514d_4e8c_b246_717bc8a45c0a.slice/crio-0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2 WatchSource:0}: Error finding container 0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2: Status 404 returned error can't find the container with id 0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2 Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.061498 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-st9fk" event={"ID":"d52ab365-514d-4e8c-b246-717bc8a45c0a","Type":"ContainerStarted","Data":"0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2"} Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.067484 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-x8c5b" event={"ID":"450dd430-3c14-413f-ba17-a467a882deb5","Type":"ContainerStarted","Data":"3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d"} Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.071289 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqk5r" event={"ID":"da61731a-e57a-4977-b781-2742e48948bd","Type":"ContainerStarted","Data":"cad9a8e6d787f1ba2c6817d88205144472a94551541679b222c95f6eb77b97ff"} Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.071319 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqk5r" 
event={"ID":"da61731a-e57a-4977-b781-2742e48948bd","Type":"ContainerStarted","Data":"78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175"} Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.085836 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4vbbg"] Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.115511 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-jqk5r" podStartSLOduration=2.115494398 podStartE2EDuration="2.115494398s" podCreationTimestamp="2025-10-01 05:47:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:03.114960154 +0000 UTC m=+1072.052938768" watchObservedRunningTime="2025-10-01 05:47:03.115494398 +0000 UTC m=+1072.053473012" Oct 01 05:47:03 crc kubenswrapper[4661]: I1001 05:47:03.188083 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qgcfj"] Oct 01 05:47:03 crc kubenswrapper[4661]: W1001 05:47:03.191717 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59992b65_645e_44c1_9cb4_c3ac1f0bf8da.slice/crio-acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10 WatchSource:0}: Error finding container acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10: Status 404 returned error can't find the container with id acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10 Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.091323 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgcfj" event={"ID":"59992b65-645e-44c1-9cb4-c3ac1f0bf8da","Type":"ContainerStarted","Data":"acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10"} Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.094551 4661 generic.go:334] "Generic (PLEG): container finished" podID="450dd430-3c14-413f-ba17-a467a882deb5" containerID="0c31670bca15667c235057f0bfbaf3c18ce9e873557a8599037a44672796eed9" exitCode=0 Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.094614 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-x8c5b" event={"ID":"450dd430-3c14-413f-ba17-a467a882deb5","Type":"ContainerDied","Data":"0c31670bca15667c235057f0bfbaf3c18ce9e873557a8599037a44672796eed9"} Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.098258 4661 generic.go:334] "Generic (PLEG): container finished" podID="43bc7be9-b86f-4811-ae9f-f8c1898910e3" containerID="1359f29d342cded5cf0e1cfde794a247fa8741949f3c7bc4fb8e7fb0ab27004f" exitCode=0 Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.098343 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4vbbg" event={"ID":"43bc7be9-b86f-4811-ae9f-f8c1898910e3","Type":"ContainerDied","Data":"1359f29d342cded5cf0e1cfde794a247fa8741949f3c7bc4fb8e7fb0ab27004f"} Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.098369 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4vbbg" event={"ID":"43bc7be9-b86f-4811-ae9f-f8c1898910e3","Type":"ContainerStarted","Data":"18b2144967e3980744f221eff38a09101ba7a7a4090719f4c977dc00a5c4205c"} Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.110851 4661 generic.go:334] "Generic (PLEG): container finished" podID="da61731a-e57a-4977-b781-2742e48948bd" 
containerID="cad9a8e6d787f1ba2c6817d88205144472a94551541679b222c95f6eb77b97ff" exitCode=0 Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.110938 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqk5r" event={"ID":"da61731a-e57a-4977-b781-2742e48948bd","Type":"ContainerDied","Data":"cad9a8e6d787f1ba2c6817d88205144472a94551541679b222c95f6eb77b97ff"} Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.309010 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:47:04 crc kubenswrapper[4661]: I1001 05:47:04.309063 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.655088 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.788785 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kl4sb\" (UniqueName: \"kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb\") pod \"da61731a-e57a-4977-b781-2742e48948bd\" (UID: \"da61731a-e57a-4977-b781-2742e48948bd\") " Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.836775 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb" (OuterVolumeSpecName: "kube-api-access-kl4sb") pod "da61731a-e57a-4977-b781-2742e48948bd" (UID: "da61731a-e57a-4977-b781-2742e48948bd"). InnerVolumeSpecName "kube-api-access-kl4sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.892542 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kl4sb\" (UniqueName: \"kubernetes.io/projected/da61731a-e57a-4977-b781-2742e48948bd-kube-api-access-kl4sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.925967 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.934710 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.994351 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrd4j\" (UniqueName: \"kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j\") pod \"43bc7be9-b86f-4811-ae9f-f8c1898910e3\" (UID: \"43bc7be9-b86f-4811-ae9f-f8c1898910e3\") " Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.994406 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfp2l\" (UniqueName: \"kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l\") pod \"450dd430-3c14-413f-ba17-a467a882deb5\" (UID: \"450dd430-3c14-413f-ba17-a467a882deb5\") " Oct 01 05:47:05 crc kubenswrapper[4661]: I1001 05:47:05.999235 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l" (OuterVolumeSpecName: "kube-api-access-dfp2l") pod "450dd430-3c14-413f-ba17-a467a882deb5" (UID: "450dd430-3c14-413f-ba17-a467a882deb5"). InnerVolumeSpecName "kube-api-access-dfp2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.000827 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j" (OuterVolumeSpecName: "kube-api-access-zrd4j") pod "43bc7be9-b86f-4811-ae9f-f8c1898910e3" (UID: "43bc7be9-b86f-4811-ae9f-f8c1898910e3"). InnerVolumeSpecName "kube-api-access-zrd4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.096692 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrd4j\" (UniqueName: \"kubernetes.io/projected/43bc7be9-b86f-4811-ae9f-f8c1898910e3-kube-api-access-zrd4j\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.096729 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfp2l\" (UniqueName: \"kubernetes.io/projected/450dd430-3c14-413f-ba17-a467a882deb5-kube-api-access-dfp2l\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.138436 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-x8c5b" event={"ID":"450dd430-3c14-413f-ba17-a467a882deb5","Type":"ContainerDied","Data":"3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d"} Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.138479 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a9ef5918596073c554562c1baae405b1aea8acd4efc4063501153fb600d2e7d" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.138478 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-x8c5b" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.140530 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4vbbg" event={"ID":"43bc7be9-b86f-4811-ae9f-f8c1898910e3","Type":"ContainerDied","Data":"18b2144967e3980744f221eff38a09101ba7a7a4090719f4c977dc00a5c4205c"} Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.140587 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18b2144967e3980744f221eff38a09101ba7a7a4090719f4c977dc00a5c4205c" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.140696 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4vbbg" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.159759 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jqk5r" event={"ID":"da61731a-e57a-4977-b781-2742e48948bd","Type":"ContainerDied","Data":"78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175"} Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.159813 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78e04707e58df626a9bc2d5c6da756d4cbbfe5c87b35f2def4dbf32d16d26175" Oct 01 05:47:06 crc kubenswrapper[4661]: I1001 05:47:06.159889 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jqk5r" Oct 01 05:47:09 crc kubenswrapper[4661]: I1001 05:47:09.197602 4661 generic.go:334] "Generic (PLEG): container finished" podID="d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" containerID="258f546d48f21daee8f76b5a8f2a61544865a69cddc4f8fe22e431ef7c3e992b" exitCode=0 Oct 01 05:47:09 crc kubenswrapper[4661]: I1001 05:47:09.197787 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h5lwt" event={"ID":"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6","Type":"ContainerDied","Data":"258f546d48f21daee8f76b5a8f2a61544865a69cddc4f8fe22e431ef7c3e992b"} Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.666657 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8a27-account-create-wcdfl"] Oct 01 05:47:11 crc kubenswrapper[4661]: E1001 05:47:11.667271 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450dd430-3c14-413f-ba17-a467a882deb5" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667283 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="450dd430-3c14-413f-ba17-a467a882deb5" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: E1001 05:47:11.667295 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da61731a-e57a-4977-b781-2742e48948bd" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667302 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="da61731a-e57a-4977-b781-2742e48948bd" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: E1001 05:47:11.667333 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43bc7be9-b86f-4811-ae9f-f8c1898910e3" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667339 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="43bc7be9-b86f-4811-ae9f-f8c1898910e3" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667522 4661 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="43bc7be9-b86f-4811-ae9f-f8c1898910e3" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667535 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="450dd430-3c14-413f-ba17-a467a882deb5" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.667542 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="da61731a-e57a-4977-b781-2742e48948bd" containerName="mariadb-database-create" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.668162 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.670907 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.679604 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8a27-account-create-wcdfl"] Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.802519 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6lw4\" (UniqueName: \"kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4\") pod \"barbican-8a27-account-create-wcdfl\" (UID: \"6c241ad1-75a9-423e-942c-7e7965589bdc\") " pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.906196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6lw4\" (UniqueName: \"kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4\") pod \"barbican-8a27-account-create-wcdfl\" (UID: \"6c241ad1-75a9-423e-942c-7e7965589bdc\") " pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.928368 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6lw4\" (UniqueName: \"kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4\") pod \"barbican-8a27-account-create-wcdfl\" (UID: \"6c241ad1-75a9-423e-942c-7e7965589bdc\") " pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:11 crc kubenswrapper[4661]: I1001 05:47:11.995969 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.815359 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-h5lwt" Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.969948 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljx2f\" (UniqueName: \"kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f\") pod \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.970488 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle\") pod \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.970585 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data\") pod \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.970791 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data\") pod \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\" (UID: \"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6\") " Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.975622 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" (UID: "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:13 crc kubenswrapper[4661]: I1001 05:47:13.976605 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f" (OuterVolumeSpecName: "kube-api-access-ljx2f") pod "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" (UID: "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6"). InnerVolumeSpecName "kube-api-access-ljx2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.032819 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" (UID: "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.060082 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data" (OuterVolumeSpecName: "config-data") pod "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" (UID: "d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.076336 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljx2f\" (UniqueName: \"kubernetes.io/projected/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-kube-api-access-ljx2f\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.076392 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.076405 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.076424 4661 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.098369 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8a27-account-create-wcdfl"] Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.117401 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.257817 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-st9fk" event={"ID":"d52ab365-514d-4e8c-b246-717bc8a45c0a","Type":"ContainerStarted","Data":"8654ad653976aa1653eb601ae70d143f0f5334b9bbd9023a9f8fe748b6b06ae5"} Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.259918 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgcfj" event={"ID":"59992b65-645e-44c1-9cb4-c3ac1f0bf8da","Type":"ContainerStarted","Data":"4d30225988229ee0e740e71d9761ffcca9a41d499bc63419bda8d62f9742ae93"} Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.261828 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8a27-account-create-wcdfl" event={"ID":"6c241ad1-75a9-423e-942c-7e7965589bdc","Type":"ContainerStarted","Data":"b6cf93b02f1cfca310285bae83aa560e7b3b0be338e6818e5c89c8f5315c4555"} Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.263053 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h5lwt" event={"ID":"d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6","Type":"ContainerDied","Data":"190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422"} Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.263074 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="190df29b94503f465a394f8762b63fc3250f2da6fa30e049d110548b13055422" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.263117 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-h5lwt" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.277958 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-st9fk" podStartSLOduration=2.561803481 podStartE2EDuration="13.277938202s" podCreationTimestamp="2025-10-01 05:47:01 +0000 UTC" firstStartedPulling="2025-10-01 05:47:02.940018688 +0000 UTC m=+1071.877997292" lastFinishedPulling="2025-10-01 05:47:13.656153389 +0000 UTC m=+1082.594132013" observedRunningTime="2025-10-01 05:47:14.272711778 +0000 UTC m=+1083.210690392" watchObservedRunningTime="2025-10-01 05:47:14.277938202 +0000 UTC m=+1083.215916816" Oct 01 05:47:14 crc kubenswrapper[4661]: I1001 05:47:14.290236 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qgcfj" podStartSLOduration=2.851150856 podStartE2EDuration="13.29020687s" podCreationTimestamp="2025-10-01 05:47:01 +0000 UTC" firstStartedPulling="2025-10-01 05:47:03.194120882 +0000 UTC m=+1072.132099486" lastFinishedPulling="2025-10-01 05:47:13.633176886 +0000 UTC m=+1082.571155500" observedRunningTime="2025-10-01 05:47:14.288384839 +0000 UTC m=+1083.226363453" watchObservedRunningTime="2025-10-01 05:47:14.29020687 +0000 UTC m=+1083.228185484" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.257938 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:15 crc kubenswrapper[4661]: E1001 05:47:15.258473 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" containerName="glance-db-sync" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.258485 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" containerName="glance-db-sync" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.258673 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" containerName="glance-db-sync" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.259506 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.278188 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.282061 4661 generic.go:334] "Generic (PLEG): container finished" podID="6c241ad1-75a9-423e-942c-7e7965589bdc" containerID="4fd219f68f45baa29b0a78f6a3431221cdf2936d9690369a60d9b8c611b03285" exitCode=0 Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.284167 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8a27-account-create-wcdfl" event={"ID":"6c241ad1-75a9-423e-942c-7e7965589bdc","Type":"ContainerDied","Data":"4fd219f68f45baa29b0a78f6a3431221cdf2936d9690369a60d9b8c611b03285"} Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411219 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411427 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411454 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411483 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpbhd\" (UniqueName: \"kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411500 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.411563 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.512843 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpbhd\" (UniqueName: \"kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd\") pod 
\"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.512885 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.512954 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.512992 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.513014 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.513038 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.513827 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.514352 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.514369 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.514742 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " 
pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.514876 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.533075 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpbhd\" (UniqueName: \"kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd\") pod \"dnsmasq-dns-5dd98f6d57-t2r8r\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:15 crc kubenswrapper[4661]: I1001 05:47:15.587110 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.094941 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.292032 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" event={"ID":"67e7eee6-ae30-4dad-88cb-6e22b9f85049","Type":"ContainerStarted","Data":"624c577f02990ad7754fd32f41c182c7aaf74e0abdd3e8d0c2cb7dd3ad55271a"} Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.726361 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.836273 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6lw4\" (UniqueName: \"kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4\") pod \"6c241ad1-75a9-423e-942c-7e7965589bdc\" (UID: \"6c241ad1-75a9-423e-942c-7e7965589bdc\") " Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.860908 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4" (OuterVolumeSpecName: "kube-api-access-s6lw4") pod "6c241ad1-75a9-423e-942c-7e7965589bdc" (UID: "6c241ad1-75a9-423e-942c-7e7965589bdc"). InnerVolumeSpecName "kube-api-access-s6lw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:16 crc kubenswrapper[4661]: I1001 05:47:16.938306 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6lw4\" (UniqueName: \"kubernetes.io/projected/6c241ad1-75a9-423e-942c-7e7965589bdc-kube-api-access-s6lw4\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:17 crc kubenswrapper[4661]: I1001 05:47:17.303357 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8a27-account-create-wcdfl" Oct 01 05:47:17 crc kubenswrapper[4661]: I1001 05:47:17.303333 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8a27-account-create-wcdfl" event={"ID":"6c241ad1-75a9-423e-942c-7e7965589bdc","Type":"ContainerDied","Data":"b6cf93b02f1cfca310285bae83aa560e7b3b0be338e6818e5c89c8f5315c4555"} Oct 01 05:47:17 crc kubenswrapper[4661]: I1001 05:47:17.303527 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6cf93b02f1cfca310285bae83aa560e7b3b0be338e6818e5c89c8f5315c4555" Oct 01 05:47:17 crc kubenswrapper[4661]: I1001 05:47:17.305346 4661 generic.go:334] "Generic (PLEG): container finished" podID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerID="8ebfc3004d0b00dc074deae9d9c6ea849ac406a80b9152e8eb6ceda847bd6119" exitCode=0 Oct 01 05:47:17 crc kubenswrapper[4661]: I1001 05:47:17.305396 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" event={"ID":"67e7eee6-ae30-4dad-88cb-6e22b9f85049","Type":"ContainerDied","Data":"8ebfc3004d0b00dc074deae9d9c6ea849ac406a80b9152e8eb6ceda847bd6119"} Oct 01 05:47:18 crc kubenswrapper[4661]: I1001 05:47:18.317853 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" event={"ID":"67e7eee6-ae30-4dad-88cb-6e22b9f85049","Type":"ContainerStarted","Data":"3a0402e040b0c9ae8eb4a1f588daa821f635bd2fa36ac9193dfc33e7a3ccdf17"} Oct 01 05:47:18 crc kubenswrapper[4661]: I1001 05:47:18.324485 4661 generic.go:334] "Generic (PLEG): container finished" podID="d52ab365-514d-4e8c-b246-717bc8a45c0a" containerID="8654ad653976aa1653eb601ae70d143f0f5334b9bbd9023a9f8fe748b6b06ae5" exitCode=0 Oct 01 05:47:18 crc kubenswrapper[4661]: I1001 05:47:18.324527 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-st9fk" event={"ID":"d52ab365-514d-4e8c-b246-717bc8a45c0a","Type":"ContainerDied","Data":"8654ad653976aa1653eb601ae70d143f0f5334b9bbd9023a9f8fe748b6b06ae5"} Oct 01 05:47:18 crc kubenswrapper[4661]: I1001 05:47:18.345648 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" podStartSLOduration=3.345614167 podStartE2EDuration="3.345614167s" podCreationTimestamp="2025-10-01 05:47:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:18.337120383 +0000 UTC m=+1087.275098997" watchObservedRunningTime="2025-10-01 05:47:18.345614167 +0000 UTC m=+1087.283592781" Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.338815 4661 generic.go:334] "Generic (PLEG): container finished" podID="59992b65-645e-44c1-9cb4-c3ac1f0bf8da" containerID="4d30225988229ee0e740e71d9761ffcca9a41d499bc63419bda8d62f9742ae93" exitCode=0 Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.338943 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgcfj" event={"ID":"59992b65-645e-44c1-9cb4-c3ac1f0bf8da","Type":"ContainerDied","Data":"4d30225988229ee0e740e71d9761ffcca9a41d499bc63419bda8d62f9742ae93"} Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.340526 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.865254 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.991591 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data\") pod \"d52ab365-514d-4e8c-b246-717bc8a45c0a\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.991842 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle\") pod \"d52ab365-514d-4e8c-b246-717bc8a45c0a\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.992002 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data\") pod \"d52ab365-514d-4e8c-b246-717bc8a45c0a\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " Oct 01 05:47:19 crc kubenswrapper[4661]: I1001 05:47:19.992048 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwdng\" (UniqueName: \"kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng\") pod \"d52ab365-514d-4e8c-b246-717bc8a45c0a\" (UID: \"d52ab365-514d-4e8c-b246-717bc8a45c0a\") " Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:19.999950 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d52ab365-514d-4e8c-b246-717bc8a45c0a" (UID: "d52ab365-514d-4e8c-b246-717bc8a45c0a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.001972 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng" (OuterVolumeSpecName: "kube-api-access-qwdng") pod "d52ab365-514d-4e8c-b246-717bc8a45c0a" (UID: "d52ab365-514d-4e8c-b246-717bc8a45c0a"). InnerVolumeSpecName "kube-api-access-qwdng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.022593 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d52ab365-514d-4e8c-b246-717bc8a45c0a" (UID: "d52ab365-514d-4e8c-b246-717bc8a45c0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.052914 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data" (OuterVolumeSpecName: "config-data") pod "d52ab365-514d-4e8c-b246-717bc8a45c0a" (UID: "d52ab365-514d-4e8c-b246-717bc8a45c0a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.094826 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.094952 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.094975 4661 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d52ab365-514d-4e8c-b246-717bc8a45c0a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.094995 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwdng\" (UniqueName: \"kubernetes.io/projected/d52ab365-514d-4e8c-b246-717bc8a45c0a-kube-api-access-qwdng\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.354080 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-st9fk" event={"ID":"d52ab365-514d-4e8c-b246-717bc8a45c0a","Type":"ContainerDied","Data":"0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2"} Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.354147 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0efd5d6721f884d45bba42953c4d64b4ce978b558b36ea8aef837dec281693e2" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.354180 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-st9fk" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.776378 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.932472 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gbrx\" (UniqueName: \"kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx\") pod \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.932593 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data\") pod \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.932704 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle\") pod \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\" (UID: \"59992b65-645e-44c1-9cb4-c3ac1f0bf8da\") " Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.936888 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx" (OuterVolumeSpecName: "kube-api-access-4gbrx") pod "59992b65-645e-44c1-9cb4-c3ac1f0bf8da" (UID: "59992b65-645e-44c1-9cb4-c3ac1f0bf8da"). InnerVolumeSpecName "kube-api-access-4gbrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:20 crc kubenswrapper[4661]: I1001 05:47:20.957004 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59992b65-645e-44c1-9cb4-c3ac1f0bf8da" (UID: "59992b65-645e-44c1-9cb4-c3ac1f0bf8da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.002988 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data" (OuterVolumeSpecName: "config-data") pod "59992b65-645e-44c1-9cb4-c3ac1f0bf8da" (UID: "59992b65-645e-44c1-9cb4-c3ac1f0bf8da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.039308 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.039364 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.039382 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gbrx\" (UniqueName: \"kubernetes.io/projected/59992b65-645e-44c1-9cb4-c3ac1f0bf8da-kube-api-access-4gbrx\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.362563 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgcfj" event={"ID":"59992b65-645e-44c1-9cb4-c3ac1f0bf8da","Type":"ContainerDied","Data":"acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10"} Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.362600 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acdc0fa910f7ccfaed04678be8a73f52f195b92846e628bb8c63f443949d7e10" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.362675 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qgcfj" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.574516 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.575459 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="dnsmasq-dns" containerID="cri-o://3a0402e040b0c9ae8eb4a1f588daa821f635bd2fa36ac9193dfc33e7a3ccdf17" gracePeriod=10 Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.602056 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7v9qh"] Oct 01 05:47:21 crc kubenswrapper[4661]: E1001 05:47:21.602602 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c241ad1-75a9-423e-942c-7e7965589bdc" containerName="mariadb-account-create" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.602689 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c241ad1-75a9-423e-942c-7e7965589bdc" containerName="mariadb-account-create" Oct 01 05:47:21 crc kubenswrapper[4661]: E1001 05:47:21.602771 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52ab365-514d-4e8c-b246-717bc8a45c0a" containerName="watcher-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.602827 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52ab365-514d-4e8c-b246-717bc8a45c0a" containerName="watcher-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: E1001 05:47:21.602887 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59992b65-645e-44c1-9cb4-c3ac1f0bf8da" containerName="keystone-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.602934 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="59992b65-645e-44c1-9cb4-c3ac1f0bf8da" containerName="keystone-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.603148 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52ab365-514d-4e8c-b246-717bc8a45c0a" containerName="watcher-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.603220 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="59992b65-645e-44c1-9cb4-c3ac1f0bf8da" containerName="keystone-db-sync" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.603290 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c241ad1-75a9-423e-942c-7e7965589bdc" containerName="mariadb-account-create" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.603935 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.609515 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5nw2h" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.609755 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.609859 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.615821 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.631913 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7v9qh"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.655765 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.659227 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676243 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676308 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676345 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676366 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676392 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676426 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle\") pod \"keystone-bootstrap-7v9qh\" (UID: 
\"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676446 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676475 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nghhw\" (UniqueName: \"kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676515 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676534 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676575 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.676604 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhqn5\" (UniqueName: \"kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.700837 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.755510 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-a7c0-account-create-qc4hg"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.757590 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.771371 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a7c0-account-create-qc4hg"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.775086 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.777682 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.778834 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.780803 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788393 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788451 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788485 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788555 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nghhw\" (UniqueName: \"kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788609 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788680 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788738 4661 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788768 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhqn5\" (UniqueName: \"kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788801 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bs6r\" (UniqueName: \"kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r\") pod \"cinder-a7c0-account-create-qc4hg\" (UID: \"4d174eeb-ff97-4090-9f9a-d30f97bd926e\") " pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788918 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.788959 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.790826 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.791071 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-d2cdd" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.791239 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.793994 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.795171 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.796083 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.796904 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.797428 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.798136 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.799930 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.801702 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.810977 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.814623 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.815590 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.817765 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.818566 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc 
kubenswrapper[4661]: I1001 05:47:21.834807 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.849135 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nghhw\" (UniqueName: \"kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw\") pod \"dnsmasq-dns-55d459d457-scvxh\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.892897 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bs6r\" (UniqueName: \"kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r\") pod \"cinder-a7c0-account-create-qc4hg\" (UID: \"4d174eeb-ff97-4090-9f9a-d30f97bd926e\") " pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.906704 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.908143 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.925516 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.925535 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhqn5\" (UniqueName: \"kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5\") pod \"keystone-bootstrap-7v9qh\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.925597 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.926106 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.940990 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.942426 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.946883 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.947065 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.970990 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-9fj4q" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.977258 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.978425 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.980459 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bs6r\" (UniqueName: \"kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r\") pod \"cinder-a7c0-account-create-qc4hg\" (UID: \"4d174eeb-ff97-4090-9f9a-d30f97bd926e\") " pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994300 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994596 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994678 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh4ts\" (UniqueName: \"kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994788 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b7jj\" (UniqueName: \"kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994864 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.994967 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.995051 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:21 crc kubenswrapper[4661]: I1001 05:47:21.995142 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.008622 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.048413 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-6s9q7"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.049793 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.056380 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.057544 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bj9rg" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.057767 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.058323 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-6s9q7"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.087924 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105727 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7ldx\" (UniqueName: \"kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105769 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105805 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b7jj\" (UniqueName: \"kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105836 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105863 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmd2b\" (UniqueName: \"kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105896 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105917 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " 
pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105941 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105959 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105977 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.105999 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106014 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106036 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106056 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106083 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106110 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106127 4661 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106144 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.106161 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh4ts\" (UniqueName: \"kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.123248 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.123599 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.130298 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.176048 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b7jj\" (UniqueName: \"kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.213047 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a1c7-account-create-979gq"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.214884 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222151 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222230 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222254 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222307 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222337 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7ldx\" (UniqueName: \"kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222361 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222431 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222479 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmd2b\" (UniqueName: \"kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222510 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 
05:47:22.222532 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq7mw\" (UniqueName: \"kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222590 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222616 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222670 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222715 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.222738 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.228079 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.228517 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.240904 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.241473 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " 
pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.241531 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.241618 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a1c7-account-create-979gq"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.241991 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.242244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.242506 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.243323 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.260618 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.269050 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data\") pod \"watcher-decision-engine-0\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.278319 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.279679 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data\") pod \"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.280389 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7ldx\" (UniqueName: \"kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx\") pod 
\"watcher-api-0\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.280992 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.293470 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh4ts\" (UniqueName: \"kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts\") pod \"watcher-applier-0\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.296444 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmd2b\" (UniqueName: \"kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b\") pod \"horizon-5ff7f46dd9-gj8q6\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.308373 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330178 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330237 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330255 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq7mw\" (UniqueName: \"kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330289 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8brn\" (UniqueName: \"kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn\") pod \"neutron-a1c7-account-create-979gq\" (UID: \"e30e23d1-dd87-43e3-975f-38c9c67a63eb\") " pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330374 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.330401 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 
05:47:22.361713 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.363295 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.367521 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.367953 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.373243 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq7mw\" (UniqueName: \"kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.373277 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f48gr" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.373462 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.373488 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.378354 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.382214 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.382288 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle\") pod \"placement-db-sync-6s9q7\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.385741 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.388002 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.396684 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.402389 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.402593 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.434940 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.434988 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435031 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435047 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435064 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n5lm\" (UniqueName: \"kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435093 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8brn\" (UniqueName: \"kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn\") pod \"neutron-a1c7-account-create-979gq\" (UID: \"e30e23d1-dd87-43e3-975f-38c9c67a63eb\") " pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435111 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435132 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435149 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435173 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435220 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd6f4\" (UniqueName: \"kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435243 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435268 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435304 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.435326 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.438859 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.479241 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8brn\" (UniqueName: \"kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn\") pod \"neutron-a1c7-account-create-979gq\" (UID: 
\"e30e23d1-dd87-43e3-975f-38c9c67a63eb\") " pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.483171 4661 generic.go:334] "Generic (PLEG): container finished" podID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerID="3a0402e040b0c9ae8eb4a1f588daa821f635bd2fa36ac9193dfc33e7a3ccdf17" exitCode=0 Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.483213 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" event={"ID":"67e7eee6-ae30-4dad-88cb-6e22b9f85049","Type":"ContainerDied","Data":"3a0402e040b0c9ae8eb4a1f588daa821f635bd2fa36ac9193dfc33e7a3ccdf17"} Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.510400 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.511846 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.533704 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539857 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd6f4\" (UniqueName: \"kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539897 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539912 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539928 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539949 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539978 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.539996 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540017 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540040 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540058 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540081 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cvpq\" (UniqueName: \"kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540101 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540136 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540154 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540169 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n5lm\" (UniqueName: \"kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540197 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540213 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540229 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540250 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540268 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.540284 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.541873 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.542362 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.542670 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.545884 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.549232 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.550953 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.551324 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.552512 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.552819 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.552822 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.558290 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.566504 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.578951 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.585674 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd6f4\" (UniqueName: \"kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4\") pod 
\"dnsmasq-dns-7cf77b4997-gvt8p\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.613380 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n5lm\" (UniqueName: \"kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.624453 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.643042 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.666840 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.666921 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.666942 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.666965 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667011 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667102 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667124 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd\") pod 
\"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667153 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667192 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjb9t\" (UniqueName: \"kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667221 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667277 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667314 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cvpq\" (UniqueName: \"kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.667733 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.671362 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-cwwsl"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.679021 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.685818 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.688395 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.689211 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.690470 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.695178 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.695382 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.696418 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.698587 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.708457 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.708671 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hks7z" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.712356 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-cwwsl"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.716424 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cvpq\" (UniqueName: \"kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq\") pod \"ceilometer-0\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " pod="openstack/ceilometer-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.716706 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.732713 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.734711 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.737229 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.737702 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.741149 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.759945 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.771268 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785159 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785445 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785522 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785557 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785666 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785688 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785709 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkw8h\" (UniqueName: \"kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.785748 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjb9t\" (UniqueName: 
\"kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.787777 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.788687 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.788921 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.795529 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.807555 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.815555 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjb9t\" (UniqueName: \"kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t\") pod \"horizon-c6c7c854f-5kllc\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.828227 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896622 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7grzf\" (UniqueName: \"kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896675 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896698 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896744 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896775 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkw8h\" (UniqueName: \"kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896846 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896912 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.896950 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.897049 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.897067 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.897090 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.915797 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.930341 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:22 crc kubenswrapper[4661]: I1001 05:47:22.934182 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkw8h\" (UniqueName: \"kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h\") pod \"barbican-db-sync-cwwsl\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.998560 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.998918 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999358 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.998935 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " 
pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999409 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999458 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7grzf\" (UniqueName: \"kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999478 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999492 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999540 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999798 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:22.999918 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.007360 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.009585 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.015939 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.016097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.016616 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7grzf\" (UniqueName: \"kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.027719 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.131077 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.144327 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.153067 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.171690 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.396537 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.495605 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" event={"ID":"67e7eee6-ae30-4dad-88cb-6e22b9f85049","Type":"ContainerDied","Data":"624c577f02990ad7754fd32f41c182c7aaf74e0abdd3e8d0c2cb7dd3ad55271a"} Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.495686 4661 scope.go:117] "RemoveContainer" containerID="3a0402e040b0c9ae8eb4a1f588daa821f635bd2fa36ac9193dfc33e7a3ccdf17" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.495737 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dd98f6d57-t2r8r" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514486 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514602 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514642 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpbhd\" (UniqueName: \"kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514687 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514715 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.514765 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0\") pod \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\" (UID: \"67e7eee6-ae30-4dad-88cb-6e22b9f85049\") " Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.527297 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd" (OuterVolumeSpecName: "kube-api-access-xpbhd") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "kube-api-access-xpbhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.540945 4661 scope.go:117] "RemoveContainer" containerID="8ebfc3004d0b00dc074deae9d9c6ea849ac406a80b9152e8eb6ceda847bd6119" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.592782 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.616879 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.624114 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpbhd\" (UniqueName: \"kubernetes.io/projected/67e7eee6-ae30-4dad-88cb-6e22b9f85049-kube-api-access-xpbhd\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.618108 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config" (OuterVolumeSpecName: "config") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.637719 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.645857 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.661082 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.664908 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7v9qh"] Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.674339 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "67e7eee6-ae30-4dad-88cb-6e22b9f85049" (UID: "67e7eee6-ae30-4dad-88cb-6e22b9f85049"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.725696 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.725730 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.725739 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.725749 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e7eee6-ae30-4dad-88cb-6e22b9f85049-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.956843 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:23 crc kubenswrapper[4661]: I1001 05:47:23.970059 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dd98f6d57-t2r8r"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.079683 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a1c7-account-create-979gq"] Oct 01 05:47:24 crc kubenswrapper[4661]: W1001 05:47:24.101481 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a9e404_beb1_4f1d_a7a2_188ccdacbb81.slice/crio-ec5d81f979af53b5613a1941f4065a44125ce4715a01d8c6645f59740b91964c WatchSource:0}: Error finding container ec5d81f979af53b5613a1941f4065a44125ce4715a01d8c6645f59740b91964c: Status 404 returned error can't find the container with id ec5d81f979af53b5613a1941f4065a44125ce4715a01d8c6645f59740b91964c Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.106973 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.139389 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.146516 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.173240 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.210366 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a7c0-account-create-qc4hg"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.462969 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.486764 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.544503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7v9qh" event={"ID":"788e9378-cdc8-4a59-8d19-4ace345b122f","Type":"ContainerStarted","Data":"259982fb4b657562c20206c08d94cf8f2e864b923f2d6af89da07aa5071b9acc"} Oct 
01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.544546 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7v9qh" event={"ID":"788e9378-cdc8-4a59-8d19-4ace345b122f","Type":"ContainerStarted","Data":"b58af04c34a9e8bd4afee1406cf2409328595ba0e85557b801701bd2c79a0df4"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.558807 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-6s9q7"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.570026 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7v9qh" podStartSLOduration=3.570010871 podStartE2EDuration="3.570010871s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:24.569115296 +0000 UTC m=+1093.507093900" watchObservedRunningTime="2025-10-01 05:47:24.570010871 +0000 UTC m=+1093.507989485" Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.590104 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerStarted","Data":"4a2cb94d0b5e0ef9abf294551c5e31cbcbaeb536a6e2e98cd5df7abebc1c51f5"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.612411 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a1c7-account-create-979gq" event={"ID":"e30e23d1-dd87-43e3-975f-38c9c67a63eb","Type":"ContainerStarted","Data":"d9745e974c0817c5ce86fd92b9ce15c9d9d04c9b0506500519f1379e4a818036"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.612457 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a1c7-account-create-979gq" event={"ID":"e30e23d1-dd87-43e3-975f-38c9c67a63eb","Type":"ContainerStarted","Data":"1507392f252865282c4f83db82ad7548d99ae5c8badcb861b0ab749c71618d42"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.613065 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.614687 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerStarted","Data":"965728f85c5dc8ebd0fa1b91dad06c83bdf64c4eae759009f29413a2a663e5d2"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.633283 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerStarted","Data":"ec5d81f979af53b5613a1941f4065a44125ce4715a01d8c6645f59740b91964c"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.644203 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerStarted","Data":"be3e731d9ed66958b005096fad3f51bdc1d878e2c81cc2d6d2ce4f562cd28309"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.648833 4661 generic.go:334] "Generic (PLEG): container finished" podID="de7b601b-11d7-48a5-9c1b-65a8a410138f" containerID="6986e2346355971650989847c9faad64740f17bd192d057d8671df1a8d25d3bc" exitCode=0 Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.648902 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d459d457-scvxh" 
event={"ID":"de7b601b-11d7-48a5-9c1b-65a8a410138f","Type":"ContainerDied","Data":"6986e2346355971650989847c9faad64740f17bd192d057d8671df1a8d25d3bc"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.648942 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d459d457-scvxh" event={"ID":"de7b601b-11d7-48a5-9c1b-65a8a410138f","Type":"ContainerStarted","Data":"1ea999ce71bfd09629ce485468ded34728cce9fc785a9e961a023b6ddb111e6f"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.658673 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a7c0-account-create-qc4hg" event={"ID":"4d174eeb-ff97-4090-9f9a-d30f97bd926e","Type":"ContainerStarted","Data":"4092a45be262474f59a4a32207b8a1c491e5af90c203dcfdd14f817c3b1211fc"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.658711 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a7c0-account-create-qc4hg" event={"ID":"4d174eeb-ff97-4090-9f9a-d30f97bd926e","Type":"ContainerStarted","Data":"b7dfce085728ee97ede93b549c61fcadf1f7fb536a53df8eb26e8b7011878375"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.693782 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12","Type":"ContainerStarted","Data":"a1d7048dd092069ae85d8868a506fe36b5db28e2eac1de313effd5ef916c5c8b"} Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.809951 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-cwwsl"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.898142 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:24 crc kubenswrapper[4661]: I1001 05:47:24.930377 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.192700 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.247170 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.369402 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:47:25 crc kubenswrapper[4661]: E1001 05:47:25.369808 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="dnsmasq-dns" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.369822 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="dnsmasq-dns" Oct 01 05:47:25 crc kubenswrapper[4661]: E1001 05:47:25.369846 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="init" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.369853 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="init" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.370049 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" containerName="dnsmasq-dns" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.371212 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.393963 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.435705 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.471145 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.514002 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjdvn\" (UniqueName: \"kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.514056 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.514102 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.514148 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.514171 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.533502 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.573175 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617311 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617371 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: 
\"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617480 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617581 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617612 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.617751 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nghhw\" (UniqueName: \"kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw\") pod \"de7b601b-11d7-48a5-9c1b-65a8a410138f\" (UID: \"de7b601b-11d7-48a5-9c1b-65a8a410138f\") " Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618027 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618054 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618122 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjdvn\" (UniqueName: \"kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618160 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618205 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.618822 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.619379 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.620140 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.639045 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.664461 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.665278 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw" (OuterVolumeSpecName: "kube-api-access-nghhw") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "kube-api-access-nghhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.671459 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjdvn\" (UniqueName: \"kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn\") pod \"horizon-7fbf467685-6nvqq\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.684119 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.695047 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config" (OuterVolumeSpecName: "config") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.698956 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.701534 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "de7b601b-11d7-48a5-9c1b-65a8a410138f" (UID: "de7b601b-11d7-48a5-9c1b-65a8a410138f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.719829 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nghhw\" (UniqueName: \"kubernetes.io/projected/de7b601b-11d7-48a5-9c1b-65a8a410138f-kube-api-access-nghhw\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.719858 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.719868 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.720023 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.720034 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.720068 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de7b601b-11d7-48a5-9c1b-65a8a410138f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.738755 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-6s9q7" event={"ID":"000282a3-18a6-4ea3-8055-ae4ad12ed82f","Type":"ContainerStarted","Data":"76d7a90a753d4fc15e44f8d8a7e4a847cff8df1fa3cb8e1a7521215053971136"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.742370 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.750319 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c6c7c854f-5kllc" event={"ID":"fb76e186-d5fa-497d-aca8-c887075b90c7","Type":"ContainerStarted","Data":"aad5678815040ff64a3274b5ec6bcb29f0dfb04e621db8a8e22846887481bef2"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.752978 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerStarted","Data":"5eab942f7bfca8cd48b35ce3086ec60cee329cae01bb97f436310d0766ce781a"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.759057 4661 generic.go:334] "Generic (PLEG): container finished" podID="e30e23d1-dd87-43e3-975f-38c9c67a63eb" containerID="d9745e974c0817c5ce86fd92b9ce15c9d9d04c9b0506500519f1379e4a818036" exitCode=0 Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.776782 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67e7eee6-ae30-4dad-88cb-6e22b9f85049" path="/var/lib/kubelet/pods/67e7eee6-ae30-4dad-88cb-6e22b9f85049/volumes" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.777376 4661 generic.go:334] "Generic (PLEG): container finished" podID="4d174eeb-ff97-4090-9f9a-d30f97bd926e" containerID="4092a45be262474f59a4a32207b8a1c491e5af90c203dcfdd14f817c3b1211fc" exitCode=0 Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.777452 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a1c7-account-create-979gq" event={"ID":"e30e23d1-dd87-43e3-975f-38c9c67a63eb","Type":"ContainerDied","Data":"d9745e974c0817c5ce86fd92b9ce15c9d9d04c9b0506500519f1379e4a818036"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.777476 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerStarted","Data":"c80b010fd48f21b2a4e7c78754975049fbec743b8f1f668f8e8e0345f9c1aab2"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.777487 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a7c0-account-create-qc4hg" event={"ID":"4d174eeb-ff97-4090-9f9a-d30f97bd926e","Type":"ContainerDied","Data":"4092a45be262474f59a4a32207b8a1c491e5af90c203dcfdd14f817c3b1211fc"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.791720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d459d457-scvxh" event={"ID":"de7b601b-11d7-48a5-9c1b-65a8a410138f","Type":"ContainerDied","Data":"1ea999ce71bfd09629ce485468ded34728cce9fc785a9e961a023b6ddb111e6f"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.791770 4661 scope.go:117] "RemoveContainer" containerID="6986e2346355971650989847c9faad64740f17bd192d057d8671df1a8d25d3bc" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.791884 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55d459d457-scvxh" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.807882 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-cwwsl" event={"ID":"9ae8568a-ecc3-429d-9717-0d05cf2e52d1","Type":"ContainerStarted","Data":"6a8d034a0bf1ef13c95a855b156085db6bc6e02e87fd11cd5d7b8a83b6cda3d7"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.824140 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerStarted","Data":"71e76f677846f83c6c32de3d9b8a14b14e370a071a06027ba1a2f98065f3c230"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.836674 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerStarted","Data":"ab0a59b3b5e85e14148e1acfe9c9c69cc0d2c5a90ed76a7f7d68d73446a79641"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.853678 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api-log" containerID="cri-o://36a4fbb35eb02a000230d46293011daca3ec842330ee75ef956e63d7ad11bd19" gracePeriod=30 Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.853906 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerStarted","Data":"77ccfbca6f6e32ac52c29498536849a6d5b4656d446858ebc9537db59648d587"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.853932 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerStarted","Data":"36a4fbb35eb02a000230d46293011daca3ec842330ee75ef956e63d7ad11bd19"} Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.854240 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api" containerID="cri-o://77ccfbca6f6e32ac52c29498536849a6d5b4656d446858ebc9537db59648d587" gracePeriod=30 Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.854289 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.878404 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.904887 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55d459d457-scvxh"] Oct 01 05:47:25 crc kubenswrapper[4661]: I1001 05:47:25.914397 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=4.914375641 podStartE2EDuration="4.914375641s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:25.904742626 +0000 UTC m=+1094.842721260" watchObservedRunningTime="2025-10-01 05:47:25.914375641 +0000 UTC m=+1094.852354255" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.318909 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" 
containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.151:9322/\": read tcp 10.217.0.2:59046->10.217.0.151:9322: read: connection reset by peer" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.520130 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.520503 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.607912 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.657570 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8brn\" (UniqueName: \"kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn\") pod \"e30e23d1-dd87-43e3-975f-38c9c67a63eb\" (UID: \"e30e23d1-dd87-43e3-975f-38c9c67a63eb\") " Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.657664 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bs6r\" (UniqueName: \"kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r\") pod \"4d174eeb-ff97-4090-9f9a-d30f97bd926e\" (UID: \"4d174eeb-ff97-4090-9f9a-d30f97bd926e\") " Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.668382 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r" (OuterVolumeSpecName: "kube-api-access-7bs6r") pod "4d174eeb-ff97-4090-9f9a-d30f97bd926e" (UID: "4d174eeb-ff97-4090-9f9a-d30f97bd926e"). InnerVolumeSpecName "kube-api-access-7bs6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.676095 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn" (OuterVolumeSpecName: "kube-api-access-x8brn") pod "e30e23d1-dd87-43e3-975f-38c9c67a63eb" (UID: "e30e23d1-dd87-43e3-975f-38c9c67a63eb"). InnerVolumeSpecName "kube-api-access-x8brn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.761567 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8brn\" (UniqueName: \"kubernetes.io/projected/e30e23d1-dd87-43e3-975f-38c9c67a63eb-kube-api-access-x8brn\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.761595 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bs6r\" (UniqueName: \"kubernetes.io/projected/4d174eeb-ff97-4090-9f9a-d30f97bd926e-kube-api-access-7bs6r\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.889406 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a7c0-account-create-qc4hg" event={"ID":"4d174eeb-ff97-4090-9f9a-d30f97bd926e","Type":"ContainerDied","Data":"b7dfce085728ee97ede93b549c61fcadf1f7fb536a53df8eb26e8b7011878375"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.889749 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7dfce085728ee97ede93b549c61fcadf1f7fb536a53df8eb26e8b7011878375" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.889453 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a7c0-account-create-qc4hg" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.890916 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerStarted","Data":"a968d9559ab95035b58d87618b755e39f127cf4619d342515817c08e242950a1"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.892906 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerStarted","Data":"4b1f79d5d6c33963aa97163df184d53bfbe1c00d491dc4b9cc4aa4e115f27c83"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.894938 4661 generic.go:334] "Generic (PLEG): container finished" podID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerID="71e76f677846f83c6c32de3d9b8a14b14e370a071a06027ba1a2f98065f3c230" exitCode=0 Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.894981 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerDied","Data":"71e76f677846f83c6c32de3d9b8a14b14e370a071a06027ba1a2f98065f3c230"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.894995 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerStarted","Data":"25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.896152 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.898673 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a1c7-account-create-979gq" event={"ID":"e30e23d1-dd87-43e3-975f-38c9c67a63eb","Type":"ContainerDied","Data":"1507392f252865282c4f83db82ad7548d99ae5c8badcb861b0ab749c71618d42"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.898698 4661 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="1507392f252865282c4f83db82ad7548d99ae5c8badcb861b0ab749c71618d42" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.898721 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a1c7-account-create-979gq" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.923581 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" podStartSLOduration=4.923566407 podStartE2EDuration="4.923566407s" podCreationTimestamp="2025-10-01 05:47:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:26.91747819 +0000 UTC m=+1095.855456824" watchObservedRunningTime="2025-10-01 05:47:26.923566407 +0000 UTC m=+1095.861545011" Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.928391 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerStarted","Data":"0e0699d367b1f9b5183f6c53f3a7ad35da88f927509ba1de72bcfa012efa1a1b"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.932809 4661 generic.go:334] "Generic (PLEG): container finished" podID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerID="77ccfbca6f6e32ac52c29498536849a6d5b4656d446858ebc9537db59648d587" exitCode=0 Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.932831 4661 generic.go:334] "Generic (PLEG): container finished" podID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerID="36a4fbb35eb02a000230d46293011daca3ec842330ee75ef956e63d7ad11bd19" exitCode=143 Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.932867 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerDied","Data":"77ccfbca6f6e32ac52c29498536849a6d5b4656d446858ebc9537db59648d587"} Oct 01 05:47:26 crc kubenswrapper[4661]: I1001 05:47:26.932882 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerDied","Data":"36a4fbb35eb02a000230d46293011daca3ec842330ee75ef956e63d7ad11bd19"} Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.403510 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.595874 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs\") pod \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.596205 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs" (OuterVolumeSpecName: "logs") pod "ea06b280-9a36-48a6-a5b8-b2f0f342bf66" (UID: "ea06b280-9a36-48a6-a5b8-b2f0f342bf66"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.596304 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data\") pod \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.596345 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7ldx\" (UniqueName: \"kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx\") pod \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.596487 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca\") pod \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.597018 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle\") pod \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\" (UID: \"ea06b280-9a36-48a6-a5b8-b2f0f342bf66\") " Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.597380 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.603416 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx" (OuterVolumeSpecName: "kube-api-access-k7ldx") pod "ea06b280-9a36-48a6-a5b8-b2f0f342bf66" (UID: "ea06b280-9a36-48a6-a5b8-b2f0f342bf66"). InnerVolumeSpecName "kube-api-access-k7ldx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.622347 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea06b280-9a36-48a6-a5b8-b2f0f342bf66" (UID: "ea06b280-9a36-48a6-a5b8-b2f0f342bf66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.648709 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "ea06b280-9a36-48a6-a5b8-b2f0f342bf66" (UID: "ea06b280-9a36-48a6-a5b8-b2f0f342bf66"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.659232 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data" (OuterVolumeSpecName: "config-data") pod "ea06b280-9a36-48a6-a5b8-b2f0f342bf66" (UID: "ea06b280-9a36-48a6-a5b8-b2f0f342bf66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.699030 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.699060 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7ldx\" (UniqueName: \"kubernetes.io/projected/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-kube-api-access-k7ldx\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.699071 4661 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.699079 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea06b280-9a36-48a6-a5b8-b2f0f342bf66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.777315 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7b601b-11d7-48a5-9c1b-65a8a410138f" path="/var/lib/kubelet/pods/de7b601b-11d7-48a5-9c1b-65a8a410138f/volumes" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.971581 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.971763 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"ea06b280-9a36-48a6-a5b8-b2f0f342bf66","Type":"ContainerDied","Data":"be3e731d9ed66958b005096fad3f51bdc1d878e2c81cc2d6d2ce4f562cd28309"} Oct 01 05:47:27 crc kubenswrapper[4661]: I1001 05:47:27.971864 4661 scope.go:117] "RemoveContainer" containerID="77ccfbca6f6e32ac52c29498536849a6d5b4656d446858ebc9537db59648d587" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.002647 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.026776 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.029315 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:28 crc kubenswrapper[4661]: E1001 05:47:28.030512 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b601b-11d7-48a5-9c1b-65a8a410138f" containerName="init" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.030530 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b601b-11d7-48a5-9c1b-65a8a410138f" containerName="init" Oct 01 05:47:28 crc kubenswrapper[4661]: E1001 05:47:28.030554 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30e23d1-dd87-43e3-975f-38c9c67a63eb" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.030560 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30e23d1-dd87-43e3-975f-38c9c67a63eb" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: E1001 05:47:28.030568 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d174eeb-ff97-4090-9f9a-d30f97bd926e" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.030575 4661 
state_mem.go:107] "Deleted CPUSet assignment" podUID="4d174eeb-ff97-4090-9f9a-d30f97bd926e" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: E1001 05:47:28.030825 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.030833 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api" Oct 01 05:47:28 crc kubenswrapper[4661]: E1001 05:47:28.030843 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api-log" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.030848 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api-log" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.035826 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30e23d1-dd87-43e3-975f-38c9c67a63eb" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.035874 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.035893 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7b601b-11d7-48a5-9c1b-65a8a410138f" containerName="init" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.035899 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d174eeb-ff97-4090-9f9a-d30f97bd926e" containerName="mariadb-account-create" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.035907 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" containerName="watcher-api-log" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.036937 4661 util.go:30] "No sandbox for pod can be found. 
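Aside: the cpu_manager/memory_manager RemoveStaleState lines run when a new pod is admitted: checkpointed CPU and memory assignments belonging to pods that no longer exist are dropped before the replacement watcher-api-0 is sized. A Go sketch of that sweep over a hypothetical assignments map (the real checkpoints live in the resource managers' state files):

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments whose pod is no longer active,
// mirroring the "RemoveStaleState: removing container" lines above.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k) // safe to delete while ranging in Go
		}
	}
}

func main() {
	assignments := map[key]string{
		{"ea06b280", "watcher-api"}:     "cpuset 0-1", // illustrative values
		{"ea06b280", "watcher-api-log"}: "cpuset 2",
		{"live-pod", "app"}:             "cpuset 3",
	}
	removeStaleState(assignments, map[string]bool{"live-pod": true})
	fmt.Println("remaining:", assignments)
}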
Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.041051 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.071711 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.209056 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5rzf\" (UniqueName: \"kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.209138 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.209171 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.209195 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.209245 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.310489 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5rzf\" (UniqueName: \"kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.310573 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.310609 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.310644 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.310697 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.311624 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.315487 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.316216 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.320583 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.329271 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5rzf\" (UniqueName: \"kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf\") pod \"watcher-api-0\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.381555 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:28 crc kubenswrapper[4661]: I1001 05:47:28.905364 4661 scope.go:117] "RemoveContainer" containerID="36a4fbb35eb02a000230d46293011daca3ec842330ee75ef956e63d7ad11bd19" Oct 01 05:47:29 crc kubenswrapper[4661]: I1001 05:47:29.774086 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea06b280-9a36-48a6-a5b8-b2f0f342bf66" path="/var/lib/kubelet/pods/ea06b280-9a36-48a6-a5b8-b2f0f342bf66/volumes" Oct 01 05:47:30 crc kubenswrapper[4661]: I1001 05:47:30.022690 4661 generic.go:334] "Generic (PLEG): container finished" podID="788e9378-cdc8-4a59-8d19-4ace345b122f" containerID="259982fb4b657562c20206c08d94cf8f2e864b923f2d6af89da07aa5071b9acc" exitCode=0 Oct 01 05:47:30 crc kubenswrapper[4661]: I1001 05:47:30.022760 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7v9qh" event={"ID":"788e9378-cdc8-4a59-8d19-4ace345b122f","Type":"ContainerDied","Data":"259982fb4b657562c20206c08d94cf8f2e864b923f2d6af89da07aa5071b9acc"} Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.348016 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.396184 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.398463 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.401315 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.406871 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.459267 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.484848 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.484907 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.484973 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.485023 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs\") pod \"horizon-8674487c84-nz4kb\" (UID: 
\"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.485048 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.485086 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.485107 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bln8d\" (UniqueName: \"kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.500651 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-77d96d88fb-5fr24"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.502658 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.508184 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d96d88fb-5fr24"] Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.587780 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p672t\" (UniqueName: \"kubernetes.io/projected/a78c5827-b563-4f29-9a60-6810f67f943a-kube-api-access-p672t\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.587833 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-secret-key\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.587926 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.587988 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588020 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a78c5827-b563-4f29-9a60-6810f67f943a-logs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588047 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-combined-ca-bundle\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588081 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588104 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bln8d\" (UniqueName: \"kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588123 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-scripts\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588147 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588187 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-config-data\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588206 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-tls-certs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588224 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.588295 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.590076 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.590322 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.591195 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.596005 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.598001 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.598954 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.612959 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bln8d\" (UniqueName: \"kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d\") pod \"horizon-8674487c84-nz4kb\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690468 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p672t\" (UniqueName: \"kubernetes.io/projected/a78c5827-b563-4f29-9a60-6810f67f943a-kube-api-access-p672t\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690541 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-secret-key\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " 
pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690618 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a78c5827-b563-4f29-9a60-6810f67f943a-logs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690671 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-combined-ca-bundle\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690731 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-scripts\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690784 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-config-data\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.690800 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-tls-certs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.691519 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a78c5827-b563-4f29-9a60-6810f67f943a-logs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.691960 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-scripts\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.692299 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a78c5827-b563-4f29-9a60-6810f67f943a-config-data\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.694112 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-combined-ca-bundle\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.696296 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-secret-key\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.705066 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78c5827-b563-4f29-9a60-6810f67f943a-horizon-tls-certs\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.705709 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p672t\" (UniqueName: \"kubernetes.io/projected/a78c5827-b563-4f29-9a60-6810f67f943a-kube-api-access-p672t\") pod \"horizon-77d96d88fb-5fr24\" (UID: \"a78c5827-b563-4f29-9a60-6810f67f943a\") " pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.719238 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:31 crc kubenswrapper[4661]: I1001 05:47:31.833399 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.001731 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-q2jck"] Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.004515 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.006947 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-ht7jd" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.007510 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.007679 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.013611 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q2jck"] Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.097972 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.098018 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.098082 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.098174 4661 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.098271 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkd2h\" (UniqueName: \"kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.098318 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200680 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkd2h\" (UniqueName: \"kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200723 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200780 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200797 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200830 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.200867 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.205102 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.206238 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.206279 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.206550 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.213983 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.221498 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkd2h\" (UniqueName: \"kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h\") pod \"cinder-db-sync-q2jck\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") " pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.288596 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bcc6q"] Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.289935 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.295825 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.295953 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.296104 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2qc29" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.302510 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.306846 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8vtr\" (UniqueName: \"kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.307086 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.312739 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bcc6q"] Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.333800 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-q2jck" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.409490 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.409552 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8vtr\" (UniqueName: \"kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.409623 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.413534 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.423669 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.425785 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8vtr\" (UniqueName: \"kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr\") pod \"neutron-db-sync-bcc6q\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.608462 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.838660 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.936343 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"] Oct 01 05:47:32 crc kubenswrapper[4661]: I1001 05:47:32.942980 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="dnsmasq-dns" containerID="cri-o://138b9d7cb946db5cddc0facb6398bc5a4bafc87abc0b888c399b94904e85b0fb" gracePeriod=10 Oct 01 05:47:34 crc kubenswrapper[4661]: I1001 05:47:34.075433 4661 generic.go:334] "Generic (PLEG): container finished" podID="29234de0-146d-457e-9b25-f0d8804cf06e" containerID="138b9d7cb946db5cddc0facb6398bc5a4bafc87abc0b888c399b94904e85b0fb" exitCode=0 Oct 01 05:47:34 crc kubenswrapper[4661]: I1001 05:47:34.075499 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" event={"ID":"29234de0-146d-457e-9b25-f0d8804cf06e","Type":"ContainerDied","Data":"138b9d7cb946db5cddc0facb6398bc5a4bafc87abc0b888c399b94904e85b0fb"} Oct 01 05:47:34 crc kubenswrapper[4661]: I1001 05:47:34.309083 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:47:34 crc kubenswrapper[4661]: I1001 05:47:34.309146 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:47:37 crc kubenswrapper[4661]: I1001 05:47:37.219551 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: connect: connection refused" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.099060 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.099683 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-horizon:current" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.099803 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.rdoproject.org/podified-master-centos10/openstack-horizon:current,Command:[/bin/bash],Args:[-c tail -n+1 -F 
Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.107072 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-horizon:current\\\"\"]" pod="openstack/horizon-c6c7c854f-5kllc" podUID="fb76e186-d5fa-497d-aca8-c887075b90c7"
Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.147371 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7v9qh" event={"ID":"788e9378-cdc8-4a59-8d19-4ace345b122f","Type":"ContainerDied","Data":"b58af04c34a9e8bd4afee1406cf2409328595ba0e85557b801701bd2c79a0df4"}
Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.147421 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b58af04c34a9e8bd4afee1406cf2409328595ba0e85557b801701bd2c79a0df4"
Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.267041 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7v9qh"
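Aside: the horizon pod above alternates between ErrImagePull (the CRI pull itself failed; here the image copy was cancelled) and ImagePullBackOff (the kubelet refusing to retry immediately). Retries back off exponentially; the 10s initial delay doubling to a 5m cap below matches the kubelet's commonly documented defaults, but treat the exact values as assumptions:

package main

import (
	"fmt"
	"time"
)

// backoffDelays returns the image-pull retry delays: start at base and
// double per consecutive failure, capped at max (10s, 20s, 40s, ... 5m).
func backoffDelays(base, max time.Duration, failures int) []time.Duration {
	delays := make([]time.Duration, 0, failures)
	d := base
	for i := 0; i < failures; i++ {
		delays = append(delays, d)
		if d *= 2; d > max {
			d = max
		}
	}
	return delays
}

func main() {
	for i, d := range backoffDelays(10*time.Second, 5*time.Minute, 7) {
		fmt.Printf("pull attempt %d failed -> ImagePullBackOff for %v\n", i+1, d)
	}
}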
Need to start a new one" pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397014 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397069 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397140 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397161 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397296 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhqn5\" (UniqueName: \"kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.397361 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data\") pod \"788e9378-cdc8-4a59-8d19-4ace345b122f\" (UID: \"788e9378-cdc8-4a59-8d19-4ace345b122f\") " Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.403027 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5" (OuterVolumeSpecName: "kube-api-access-qhqn5") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "kube-api-access-qhqn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.403746 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.414140 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts" (OuterVolumeSpecName: "scripts") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.424798 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.465863 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.471894 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data" (OuterVolumeSpecName: "config-data") pod "788e9378-cdc8-4a59-8d19-4ace345b122f" (UID: "788e9378-cdc8-4a59-8d19-4ace345b122f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499450 4661 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499481 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499489 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499499 4661 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499509 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhqn5\" (UniqueName: \"kubernetes.io/projected/788e9378-cdc8-4a59-8d19-4ace345b122f-kube-api-access-qhqn5\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.499517 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788e9378-cdc8-4a59-8d19-4ace345b122f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.800406 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.800469 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 
05:47:41.800614 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nkw8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-cwwsl_openstack(9ae8568a-ecc3-429d-9717-0d05cf2e52d1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:47:41 crc kubenswrapper[4661]: E1001 05:47:41.801864 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-cwwsl" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.883529 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:41 crc kubenswrapper[4661]: I1001 05:47:41.893509 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.033280 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts\") pod \"fb76e186-d5fa-497d-aca8-c887075b90c7\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.033850 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds7cp\" (UniqueName: \"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.033867 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts" (OuterVolumeSpecName: "scripts") pod "fb76e186-d5fa-497d-aca8-c887075b90c7" (UID: "fb76e186-d5fa-497d-aca8-c887075b90c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.033884 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs\") pod \"fb76e186-d5fa-497d-aca8-c887075b90c7\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034093 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs" (OuterVolumeSpecName: "logs") pod "fb76e186-d5fa-497d-aca8-c887075b90c7" (UID: "fb76e186-d5fa-497d-aca8-c887075b90c7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034224 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034256 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034353 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034380 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjb9t\" (UniqueName: \"kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t\") pod \"fb76e186-d5fa-497d-aca8-c887075b90c7\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034449 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data\") pod \"fb76e186-d5fa-497d-aca8-c887075b90c7\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034476 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key\") pod \"fb76e186-d5fa-497d-aca8-c887075b90c7\" (UID: \"fb76e186-d5fa-497d-aca8-c887075b90c7\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034519 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.034538 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb\") pod \"29234de0-146d-457e-9b25-f0d8804cf06e\" (UID: \"29234de0-146d-457e-9b25-f0d8804cf06e\") " Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.038067 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb76e186-d5fa-497d-aca8-c887075b90c7-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.038095 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.040258 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp" (OuterVolumeSpecName: "kube-api-access-ds7cp") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "kube-api-access-ds7cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.041436 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data" (OuterVolumeSpecName: "config-data") pod "fb76e186-d5fa-497d-aca8-c887075b90c7" (UID: "fb76e186-d5fa-497d-aca8-c887075b90c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.053766 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fb76e186-d5fa-497d-aca8-c887075b90c7" (UID: "fb76e186-d5fa-497d-aca8-c887075b90c7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.053829 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t" (OuterVolumeSpecName: "kube-api-access-tjb9t") pod "fb76e186-d5fa-497d-aca8-c887075b90c7" (UID: "fb76e186-d5fa-497d-aca8-c887075b90c7"). InnerVolumeSpecName "kube-api-access-tjb9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.101983 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.103277 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.110252 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.127506 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config" (OuterVolumeSpecName: "config") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.138372 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "29234de0-146d-457e-9b25-f0d8804cf06e" (UID: "29234de0-146d-457e-9b25-f0d8804cf06e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139547 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds7cp\" (UniqueName: \"kubernetes.io/projected/29234de0-146d-457e-9b25-f0d8804cf06e-kube-api-access-ds7cp\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139570 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139580 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139590 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139598 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjb9t\" (UniqueName: \"kubernetes.io/projected/fb76e186-d5fa-497d-aca8-c887075b90c7-kube-api-access-tjb9t\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139607 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fb76e186-d5fa-497d-aca8-c887075b90c7-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139868 4661 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fb76e186-d5fa-497d-aca8-c887075b90c7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139881 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.139889 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29234de0-146d-457e-9b25-f0d8804cf06e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.155896 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c6c7c854f-5kllc" event={"ID":"fb76e186-d5fa-497d-aca8-c887075b90c7","Type":"ContainerDied","Data":"aad5678815040ff64a3274b5ec6bcb29f0dfb04e621db8a8e22846887481bef2"} Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.155976 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-c6c7c854f-5kllc" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.160894 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" event={"ID":"29234de0-146d-457e-9b25-f0d8804cf06e","Type":"ContainerDied","Data":"96d8e78f6354368eb87106f17f6c409d9630d2ee43994c04bf3b93f3a4708732"} Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.160936 4661 scope.go:117] "RemoveContainer" containerID="138b9d7cb946db5cddc0facb6398bc5a4bafc87abc0b888c399b94904e85b0fb" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.160957 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-xpfjw" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.161223 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7v9qh" Oct 01 05:47:42 crc kubenswrapper[4661]: E1001 05:47:42.234407 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current\\\"\"" pod="openstack/barbican-db-sync-cwwsl" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.321933 4661 scope.go:117] "RemoveContainer" containerID="3ac5f85e88d4e02d7e739333fea968d1df792540624cbf881e0ad2d3a5e70bbc" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.340033 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7v9qh"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.346409 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7v9qh"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.397179 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.404082 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-xpfjw"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438117 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xp8s8"] Oct 01 05:47:42 crc kubenswrapper[4661]: E1001 05:47:42.438447 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="init" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438460 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="init" Oct 01 05:47:42 crc kubenswrapper[4661]: E1001 05:47:42.438467 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="dnsmasq-dns" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438472 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="dnsmasq-dns" Oct 01 05:47:42 crc kubenswrapper[4661]: E1001 05:47:42.438489 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788e9378-cdc8-4a59-8d19-4ace345b122f" containerName="keystone-bootstrap" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438497 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="788e9378-cdc8-4a59-8d19-4ace345b122f" containerName="keystone-bootstrap" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438685 4661 
memory_manager.go:354] "RemoveStaleState removing state" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" containerName="dnsmasq-dns" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.438711 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="788e9378-cdc8-4a59-8d19-4ace345b122f" containerName="keystone-bootstrap" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.439446 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.442611 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.442709 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.442790 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5nw2h" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.442966 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.446833 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.447063 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.447091 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj2cx\" (UniqueName: \"kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.447191 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.447228 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.447287 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " 
pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.461939 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xp8s8"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.524220 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.532783 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-c6c7c854f-5kllc"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.539803 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:47:42 crc kubenswrapper[4661]: W1001 05:47:42.540262 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fa3d349_9844_4d00_ac96_5c59f46badfa.slice/crio-5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54 WatchSource:0}: Error finding container 5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54: Status 404 returned error can't find the container with id 5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54 Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.546338 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q2jck"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.548986 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.549023 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.549046 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.549068 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.549097 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj2cx\" (UniqueName: \"kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.549202 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle\") pod \"keystone-bootstrap-xp8s8\" 
(UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.552779 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.555560 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.556406 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.557924 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.563940 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.564185 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.575084 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj2cx\" (UniqueName: \"kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx\") pod \"keystone-bootstrap-xp8s8\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.708826 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d96d88fb-5fr24"] Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.715236 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bcc6q"] Oct 01 05:47:42 crc kubenswrapper[4661]: W1001 05:47:42.721111 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fdca12_5e6d_43d7_ae59_33b3a388ada4.slice/crio-7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9 WatchSource:0}: Error finding container 7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9: Status 404 returned error can't find the container with id 7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9 Oct 01 05:47:42 crc kubenswrapper[4661]: I1001 05:47:42.803886 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.194931 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerStarted","Data":"6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.213681 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12","Type":"ContainerStarted","Data":"f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.220085 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q2jck" event={"ID":"77022b65-9c8f-4173-957d-0d0e457bd838","Type":"ContainerStarted","Data":"fabab7375587685cf1fb32d0a719e123d9d0c5bf04e3986e7a216c667643c602"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.233568 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=10.293888921 podStartE2EDuration="22.23355303s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="2025-10-01 05:47:24.178626179 +0000 UTC m=+1093.116604793" lastFinishedPulling="2025-10-01 05:47:36.118290288 +0000 UTC m=+1105.056268902" observedRunningTime="2025-10-01 05:47:43.231468183 +0000 UTC m=+1112.169446807" watchObservedRunningTime="2025-10-01 05:47:43.23355303 +0000 UTC m=+1112.171531644" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.251790 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerStarted","Data":"059af4a30fb40be11cf8bb11dd6e682155eb3dfaa28138617d9dad0f19f4dcb8"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.251832 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerStarted","Data":"6a1a61f351ed2ff739c016048c32a4ae7829bfa4bf4cc9c034ded7e4641df5e3"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.251962 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7fbf467685-6nvqq" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon-log" containerID="cri-o://6a1a61f351ed2ff739c016048c32a4ae7829bfa4bf4cc9c034ded7e4641df5e3" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.255718 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7fbf467685-6nvqq" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon" containerID="cri-o://059af4a30fb40be11cf8bb11dd6e682155eb3dfaa28138617d9dad0f19f4dcb8" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.263373 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d96d88fb-5fr24" event={"ID":"a78c5827-b563-4f29-9a60-6810f67f943a","Type":"ContainerStarted","Data":"a1a74cc854a89d4de602f2cbbb97575c197c595d82796e5e07dbeb0cc7ba4df8"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.270673 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d96d88fb-5fr24" event={"ID":"a78c5827-b563-4f29-9a60-6810f67f943a","Type":"ContainerStarted","Data":"5a6c5fcc878d9cc5d1a7fca67dc98d4286cc48d9d5137dec7959689234d15fe2"} Oct 01 
05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.283826 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerStarted","Data":"61786f3482eadec9b644f895ece00cb4a5b234149d52d724f7c9420b06973761"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.284049 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-log" containerID="cri-o://0e0699d367b1f9b5183f6c53f3a7ad35da88f927509ba1de72bcfa012efa1a1b" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.284339 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-httpd" containerID="cri-o://61786f3482eadec9b644f895ece00cb4a5b234149d52d724f7c9420b06973761" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.328102 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7fbf467685-6nvqq" podStartSLOduration=3.036148336 podStartE2EDuration="18.327972534s" podCreationTimestamp="2025-10-01 05:47:25 +0000 UTC" firstStartedPulling="2025-10-01 05:47:26.623280343 +0000 UTC m=+1095.561258957" lastFinishedPulling="2025-10-01 05:47:41.915104531 +0000 UTC m=+1110.853083155" observedRunningTime="2025-10-01 05:47:43.287332537 +0000 UTC m=+1112.225311161" watchObservedRunningTime="2025-10-01 05:47:43.327972534 +0000 UTC m=+1112.265951148" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.343419 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerStarted","Data":"75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.343456 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerStarted","Data":"5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.354981 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerStarted","Data":"77e6919ae7a66f497ea77c840a4caaf333f93857d1b9052c92d6865e26c65aa0"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.355018 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerStarted","Data":"f4e032b5b10de8de33a419a3446a6ae5ea868b661163b7f9f3d063bffeacf139"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.356947 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-6s9q7" event={"ID":"000282a3-18a6-4ea3-8055-ae4ad12ed82f","Type":"ContainerStarted","Data":"66c30d91bfe1cf0a2f9433ecca0b7a396c346c95552cd932fc0ceb14b7c30c1d"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.377321 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=21.377298259 podStartE2EDuration="21.377298259s" podCreationTimestamp="2025-10-01 05:47:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:43.352087726 +0000 UTC m=+1112.290066360" watchObservedRunningTime="2025-10-01 05:47:43.377298259 +0000 UTC m=+1112.315276873" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.377808 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerStarted","Data":"8870bddbd2e571f692a7e39886c80a605a6cc46613cec55b72b9effeee488e1c"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.377882 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerStarted","Data":"4a8cad912110e4bd4408804ab26bfbb9283cbda46a2fb40490eaabc1759c5958"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.378074 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5ff7f46dd9-gj8q6" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon-log" containerID="cri-o://4a8cad912110e4bd4408804ab26bfbb9283cbda46a2fb40490eaabc1759c5958" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.378479 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5ff7f46dd9-gj8q6" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon" containerID="cri-o://8870bddbd2e571f692a7e39886c80a605a6cc46613cec55b72b9effeee488e1c" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.391867 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerStarted","Data":"db48fc7a4a6734f309e14ca7d5365567692626d11b1764b5d5ac2c1d7cd35ddf"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.395148 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-6s9q7" podStartSLOduration=4.988924137 podStartE2EDuration="22.395132349s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="2025-10-01 05:47:24.509701901 +0000 UTC m=+1093.447680515" lastFinishedPulling="2025-10-01 05:47:41.915910103 +0000 UTC m=+1110.853888727" observedRunningTime="2025-10-01 05:47:43.383431157 +0000 UTC m=+1112.321409771" watchObservedRunningTime="2025-10-01 05:47:43.395132349 +0000 UTC m=+1112.333110963" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.400879 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerStarted","Data":"ade549bbffffc096c018670a7b49048e6ced4aabb5cea949d8cea17695ab06e1"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.401130 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-log" containerID="cri-o://4b1f79d5d6c33963aa97163df184d53bfbe1c00d491dc4b9cc4aa4e115f27c83" gracePeriod=30 Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.401528 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-httpd" containerID="cri-o://ade549bbffffc096c018670a7b49048e6ced4aabb5cea949d8cea17695ab06e1" gracePeriod=30 Oct 01 05:47:43 crc 
kubenswrapper[4661]: I1001 05:47:43.427975 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5ff7f46dd9-gj8q6" podStartSLOduration=4.55657826 podStartE2EDuration="22.42795694s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="2025-10-01 05:47:24.160714565 +0000 UTC m=+1093.098693179" lastFinishedPulling="2025-10-01 05:47:42.032093245 +0000 UTC m=+1110.970071859" observedRunningTime="2025-10-01 05:47:43.408183757 +0000 UTC m=+1112.346162371" watchObservedRunningTime="2025-10-01 05:47:43.42795694 +0000 UTC m=+1112.365935544" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.436662 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bcc6q" event={"ID":"e8fdca12-5e6d-43d7-ae59-33b3a388ada4","Type":"ContainerStarted","Data":"14d34d13c845ef5fc2830f419636aab1ec2428e0e484e69af9db6473be13355a"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.436711 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bcc6q" event={"ID":"e8fdca12-5e6d-43d7-ae59-33b3a388ada4","Type":"ContainerStarted","Data":"7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9"} Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.457006 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xp8s8"] Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.492187 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=21.492164174 podStartE2EDuration="21.492164174s" podCreationTimestamp="2025-10-01 05:47:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:43.436354691 +0000 UTC m=+1112.374333305" watchObservedRunningTime="2025-10-01 05:47:43.492164174 +0000 UTC m=+1112.430142788" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.506952 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=5.464944821 podStartE2EDuration="22.50693571s" podCreationTimestamp="2025-10-01 05:47:21 +0000 UTC" firstStartedPulling="2025-10-01 05:47:24.107375307 +0000 UTC m=+1093.045353921" lastFinishedPulling="2025-10-01 05:47:41.149366156 +0000 UTC m=+1110.087344810" observedRunningTime="2025-10-01 05:47:43.455381244 +0000 UTC m=+1112.393359858" watchObservedRunningTime="2025-10-01 05:47:43.50693571 +0000 UTC m=+1112.444914324" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.519222 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bcc6q" podStartSLOduration=11.519204267 podStartE2EDuration="11.519204267s" podCreationTimestamp="2025-10-01 05:47:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:43.478737735 +0000 UTC m=+1112.416716349" watchObservedRunningTime="2025-10-01 05:47:43.519204267 +0000 UTC m=+1112.457182881" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.775254 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29234de0-146d-457e-9b25-f0d8804cf06e" path="/var/lib/kubelet/pods/29234de0-146d-457e-9b25-f0d8804cf06e/volumes" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.776278 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="788e9378-cdc8-4a59-8d19-4ace345b122f" path="/var/lib/kubelet/pods/788e9378-cdc8-4a59-8d19-4ace345b122f/volumes" Oct 01 05:47:43 crc kubenswrapper[4661]: I1001 05:47:43.777090 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb76e186-d5fa-497d-aca8-c887075b90c7" path="/var/lib/kubelet/pods/fb76e186-d5fa-497d-aca8-c887075b90c7/volumes" Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.447721 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerStarted","Data":"77474c49cc55dbbb44863893d64cbbae78b54f0faee978b299e3d5a8651499a2"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.449794 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.453337 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xp8s8" event={"ID":"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3","Type":"ContainerStarted","Data":"8de448f1d29f5376eec6fc8283ca1bbad3def19755f762b9e4ba1597dfffe250"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.453375 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xp8s8" event={"ID":"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3","Type":"ContainerStarted","Data":"9e12d653efb8d662b596f3ab5cf74aae4591ced696f0730a8d87bd7c436b2ee0"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.455438 4661 generic.go:334] "Generic (PLEG): container finished" podID="917f129c-762e-4483-917a-15f30ea8727d" containerID="ade549bbffffc096c018670a7b49048e6ced4aabb5cea949d8cea17695ab06e1" exitCode=0 Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.455469 4661 generic.go:334] "Generic (PLEG): container finished" podID="917f129c-762e-4483-917a-15f30ea8727d" containerID="4b1f79d5d6c33963aa97163df184d53bfbe1c00d491dc4b9cc4aa4e115f27c83" exitCode=143 Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.455514 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerDied","Data":"ade549bbffffc096c018670a7b49048e6ced4aabb5cea949d8cea17695ab06e1"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.455543 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerDied","Data":"4b1f79d5d6c33963aa97163df184d53bfbe1c00d491dc4b9cc4aa4e115f27c83"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.457683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d96d88fb-5fr24" event={"ID":"a78c5827-b563-4f29-9a60-6810f67f943a","Type":"ContainerStarted","Data":"9bf128def0c64b82934ed0ef24a977489413f8476495b8f4dedd8d9eedca9c22"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.462169 4661 generic.go:334] "Generic (PLEG): container finished" podID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerID="61786f3482eadec9b644f895ece00cb4a5b234149d52d724f7c9420b06973761" exitCode=0 Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.462197 4661 generic.go:334] "Generic (PLEG): container finished" podID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerID="0e0699d367b1f9b5183f6c53f3a7ad35da88f927509ba1de72bcfa012efa1a1b" exitCode=143 Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.462238 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerDied","Data":"61786f3482eadec9b644f895ece00cb4a5b234149d52d724f7c9420b06973761"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.462255 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerDied","Data":"0e0699d367b1f9b5183f6c53f3a7ad35da88f927509ba1de72bcfa012efa1a1b"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.466991 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerStarted","Data":"807a61224a2c0f3f2b67c701b6c1327b09ad43c8eca957f3fda17dc18a7d7a29"} Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.475105 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=17.475086556 podStartE2EDuration="17.475086556s" podCreationTimestamp="2025-10-01 05:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:44.464263759 +0000 UTC m=+1113.402242373" watchObservedRunningTime="2025-10-01 05:47:44.475086556 +0000 UTC m=+1113.413065170" Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.490886 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-77d96d88fb-5fr24" podStartSLOduration=13.4908669 podStartE2EDuration="13.4908669s" podCreationTimestamp="2025-10-01 05:47:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:44.486243783 +0000 UTC m=+1113.424222427" watchObservedRunningTime="2025-10-01 05:47:44.4908669 +0000 UTC m=+1113.428845514" Oct 01 05:47:44 crc kubenswrapper[4661]: I1001 05:47:44.515449 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8674487c84-nz4kb" podStartSLOduration=13.515429034 podStartE2EDuration="13.515429034s" podCreationTimestamp="2025-10-01 05:47:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:44.507020043 +0000 UTC m=+1113.444998667" watchObservedRunningTime="2025-10-01 05:47:44.515429034 +0000 UTC m=+1113.453407648" Oct 01 05:47:45 crc kubenswrapper[4661]: I1001 05:47:45.498517 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xp8s8" podStartSLOduration=3.49850248 podStartE2EDuration="3.49850248s" podCreationTimestamp="2025-10-01 05:47:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:45.494414298 +0000 UTC m=+1114.432392912" watchObservedRunningTime="2025-10-01 05:47:45.49850248 +0000 UTC m=+1114.436481094" Oct 01 05:47:45 crc kubenswrapper[4661]: I1001 05:47:45.743506 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.489495 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"917f129c-762e-4483-917a-15f30ea8727d","Type":"ContainerDied","Data":"5eab942f7bfca8cd48b35ce3086ec60cee329cae01bb97f436310d0766ce781a"} 
Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.489529 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5eab942f7bfca8cd48b35ce3086ec60cee329cae01bb97f436310d0766ce781a" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.494027 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed164c2a-4c12-4eb0-a6d5-e37d0818a485","Type":"ContainerDied","Data":"ab0a59b3b5e85e14148e1acfe9c9c69cc0d2c5a90ed76a7f7d68d73446a79641"} Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.494082 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab0a59b3b5e85e14148e1acfe9c9c69cc0d2c5a90ed76a7f7d68d73446a79641" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.494048 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.562903 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.567146 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661276 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661335 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661372 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661395 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661422 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7grzf\" (UniqueName: \"kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661462 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661483 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661517 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661551 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661572 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661607 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661640 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661708 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n5lm\" (UniqueName: \"kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661730 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661755 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\" (UID: \"ed164c2a-4c12-4eb0-a6d5-e37d0818a485\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.661862 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs\") pod \"917f129c-762e-4483-917a-15f30ea8727d\" (UID: \"917f129c-762e-4483-917a-15f30ea8727d\") " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.666254 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.667285 4661 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs" (OuterVolumeSpecName: "logs") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.673599 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.673778 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.673869 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts" (OuterVolumeSpecName: "scripts") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.673910 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs" (OuterVolumeSpecName: "logs") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.691419 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts" (OuterVolumeSpecName: "scripts") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.710894 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf" (OuterVolumeSpecName: "kube-api-access-7grzf") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "kube-api-access-7grzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.710991 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.711128 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.717771 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm" (OuterVolumeSpecName: "kube-api-access-2n5lm") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "kube-api-access-2n5lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.721420 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.742669 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.762670 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763525 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763550 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n5lm\" (UniqueName: \"kubernetes.io/projected/917f129c-762e-4483-917a-15f30ea8727d-kube-api-access-2n5lm\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763563 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763582 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763592 4661 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763604 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763613 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763622 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7grzf\" (UniqueName: \"kubernetes.io/projected/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-kube-api-access-7grzf\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763710 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763721 4661 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763729 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763737 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/917f129c-762e-4483-917a-15f30ea8727d-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.763753 4661 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.796860 4661 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data" (OuterVolumeSpecName: "config-data") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.817449 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.818259 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "917f129c-762e-4483-917a-15f30ea8727d" (UID: "917f129c-762e-4483-917a-15f30ea8727d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.840623 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.848983 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data" (OuterVolumeSpecName: "config-data") pod "ed164c2a-4c12-4eb0-a6d5-e37d0818a485" (UID: "ed164c2a-4c12-4eb0-a6d5-e37d0818a485"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.869111 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed164c2a-4c12-4eb0-a6d5-e37d0818a485-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.869140 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.869164 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.869173 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:46 crc kubenswrapper[4661]: I1001 05:47:46.869181 4661 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/917f129c-762e-4483-917a-15f30ea8727d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.503414 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerStarted","Data":"2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68"} Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.503751 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.505867 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.556496 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.563737 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.586077 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.591165 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.603304 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: E1001 05:47:47.603759 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.603773 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: E1001 05:47:47.603783 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.603790 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: E1001 05:47:47.603808 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.603816 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: E1001 05:47:47.603839 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.603846 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.604026 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.604038 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.604055 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" containerName="glance-httpd" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.604064 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="917f129c-762e-4483-917a-15f30ea8727d" containerName="glance-log" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.605210 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.609985 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.610356 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f48gr" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.610515 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.615310 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.617052 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.618584 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.620865 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.620906 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.636117 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.645007 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.679741 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681492 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681567 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681595 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc22r\" (UniqueName: \"kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681645 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " 
pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681714 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681761 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681799 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681821 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzntc\" (UniqueName: \"kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681841 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681890 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681904 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681943 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681959 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs\") 
pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681972 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.681988 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.682020 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.781406 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="917f129c-762e-4483-917a-15f30ea8727d" path="/var/lib/kubelet/pods/917f129c-762e-4483-917a-15f30ea8727d/volumes" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.782441 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed164c2a-4c12-4eb0-a6d5-e37d0818a485" path="/var/lib/kubelet/pods/ed164c2a-4c12-4eb0-a6d5-e37d0818a485/volumes" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783134 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783187 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783217 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzntc\" (UniqueName: \"kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783234 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783313 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783330 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783372 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783388 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783405 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783421 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783472 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783493 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783516 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc22r\" (UniqueName: \"kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r\") pod 
\"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783547 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.783576 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.784728 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.785048 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.785908 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.786288 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.792938 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.793172 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.802481 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " 
pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.811281 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.812346 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.814271 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.814300 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.816183 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.820670 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.825070 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.837519 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzntc\" (UniqueName: \"kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.853835 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.866477 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wc22r\" (UniqueName: \"kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.910000 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") " pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.942061 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:47:47 crc kubenswrapper[4661]: I1001 05:47:47.946192 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.382896 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.383168 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.403822 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.533570 4661 generic.go:334] "Generic (PLEG): container finished" podID="046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" containerID="8de448f1d29f5376eec6fc8283ca1bbad3def19755f762b9e4ba1597dfffe250" exitCode=0 Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.533745 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xp8s8" event={"ID":"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3","Type":"ContainerDied","Data":"8de448f1d29f5376eec6fc8283ca1bbad3def19755f762b9e4ba1597dfffe250"} Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.546764 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.673039 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:47:48 crc kubenswrapper[4661]: I1001 05:47:48.860322 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:47:49 crc kubenswrapper[4661]: I1001 05:47:49.577535 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerStarted","Data":"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"} Oct 01 05:47:49 crc kubenswrapper[4661]: I1001 05:47:49.578027 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerStarted","Data":"d5cadb2f593454f3742e1e3b0b442f06b5d52f37099d1772bcef07527831fac5"} Oct 01 05:47:49 crc kubenswrapper[4661]: I1001 05:47:49.588547 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerStarted","Data":"86390f24ef6bb6b2aa07ae581fc2c5de9896a1c202b9469d1429619859335a1f"} Oct 01 05:47:50 
crc kubenswrapper[4661]: I1001 05:47:50.024955 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.158809 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.159131 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.159159 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.159175 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.159229 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jj2cx\" (UniqueName: \"kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.159265 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys\") pod \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\" (UID: \"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3\") " Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.165774 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts" (OuterVolumeSpecName: "scripts") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.166235 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.170157 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx" (OuterVolumeSpecName: "kube-api-access-jj2cx") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "kube-api-access-jj2cx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.170759 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.197776 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data" (OuterVolumeSpecName: "config-data") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.211745 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" (UID: "046ddcaa-ecd4-4b69-90ac-c721d4a60fc3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261881 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jj2cx\" (UniqueName: \"kubernetes.io/projected/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-kube-api-access-jj2cx\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261921 4661 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261931 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261939 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261947 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.261955 4661 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.649468 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7677654df9-tdbxq"] Oct 01 05:47:50 crc kubenswrapper[4661]: E1001 05:47:50.649974 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" containerName="keystone-bootstrap" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.649994 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" containerName="keystone-bootstrap" Oct 01 05:47:50 crc 
kubenswrapper[4661]: I1001 05:47:50.650167 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" containerName="keystone-bootstrap" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.651619 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerStarted","Data":"180dbde970bd2d19fcd43b70d2820b5f294807708c272589488401c5539d6066"} Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.651728 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.660372 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.660489 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.664799 4661 generic.go:334] "Generic (PLEG): container finished" podID="000282a3-18a6-4ea3-8055-ae4ad12ed82f" containerID="66c30d91bfe1cf0a2f9433ecca0b7a396c346c95552cd932fc0ceb14b7c30c1d" exitCode=0 Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.664856 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-6s9q7" event={"ID":"000282a3-18a6-4ea3-8055-ae4ad12ed82f","Type":"ContainerDied","Data":"66c30d91bfe1cf0a2f9433ecca0b7a396c346c95552cd932fc0ceb14b7c30c1d"} Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.674736 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7677654df9-tdbxq"] Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.690549 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xp8s8" event={"ID":"046ddcaa-ecd4-4b69-90ac-c721d4a60fc3","Type":"ContainerDied","Data":"9e12d653efb8d662b596f3ab5cf74aae4591ced696f0730a8d87bd7c436b2ee0"} Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.690587 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e12d653efb8d662b596f3ab5cf74aae4591ced696f0730a8d87bd7c436b2ee0" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.691126 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xp8s8" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.699909 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerStarted","Data":"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"} Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.743610 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.743588907 podStartE2EDuration="3.743588907s" podCreationTimestamp="2025-10-01 05:47:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:50.731261958 +0000 UTC m=+1119.669240572" watchObservedRunningTime="2025-10-01 05:47:50.743588907 +0000 UTC m=+1119.681567521" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.768692 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-internal-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769021 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-config-data\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769073 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-scripts\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769096 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-credential-keys\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769132 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-combined-ca-bundle\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769157 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzs2t\" (UniqueName: \"kubernetes.io/projected/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-kube-api-access-hzs2t\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769179 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-public-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.769196 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-fernet-keys\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870492 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-combined-ca-bundle\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870561 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzs2t\" (UniqueName: \"kubernetes.io/projected/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-kube-api-access-hzs2t\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870585 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-public-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870605 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-fernet-keys\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870683 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-internal-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870734 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-config-data\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870793 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-scripts\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.870831 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-credential-keys\") pod 
\"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.881777 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-credential-keys\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.884617 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-internal-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.885122 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-combined-ca-bundle\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.885440 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-fernet-keys\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.887301 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-config-data\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.894697 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-scripts\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.900081 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzs2t\" (UniqueName: \"kubernetes.io/projected/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-kube-api-access-hzs2t\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.906055 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2287a17-7b4e-40d0-ba56-0e78abd1b1ec-public-tls-certs\") pod \"keystone-7677654df9-tdbxq\" (UID: \"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec\") " pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:50 crc kubenswrapper[4661]: I1001 05:47:50.982282 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.365254 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.517643 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7677654df9-tdbxq"] Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.728735 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.728768 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.734030 4661 generic.go:334] "Generic (PLEG): container finished" podID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerID="db48fc7a4a6734f309e14ca7d5365567692626d11b1764b5d5ac2c1d7cd35ddf" exitCode=1 Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.734110 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerDied","Data":"db48fc7a4a6734f309e14ca7d5365567692626d11b1764b5d5ac2c1d7cd35ddf"} Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.734535 4661 scope.go:117] "RemoveContainer" containerID="db48fc7a4a6734f309e14ca7d5365567692626d11b1764b5d5ac2c1d7cd35ddf" Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.736968 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7677654df9-tdbxq" event={"ID":"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec","Type":"ContainerStarted","Data":"f73aeaf455661aaa8aaa5ad950f3302f46ba6b3d4cc5b432f8041599800ac495"} Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.749713 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerStarted","Data":"10d091cd3ef24b4abceb825ffef871176a5232bfc9bd93f25c89e39f3b7d43f6"} Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.750309 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api-log" containerID="cri-o://77e6919ae7a66f497ea77c840a4caaf333f93857d1b9052c92d6865e26c65aa0" gracePeriod=30 Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.750586 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api" containerID="cri-o://77474c49cc55dbbb44863893d64cbbae78b54f0faee978b299e3d5a8651499a2" gracePeriod=30 Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.783735 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.78372109 podStartE2EDuration="4.78372109s" podCreationTimestamp="2025-10-01 05:47:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:51.782111176 +0000 UTC m=+1120.720089790" watchObservedRunningTime="2025-10-01 05:47:51.78372109 +0000 UTC m=+1120.721699694" Oct 01 05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.833755 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 
05:47:51 crc kubenswrapper[4661]: I1001 05:47:51.838083 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.219019 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.328156 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs\") pod \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.328243 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq7mw\" (UniqueName: \"kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw\") pod \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.328393 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts\") pod \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.328431 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle\") pod \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.328465 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data\") pod \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\" (UID: \"000282a3-18a6-4ea3-8055-ae4ad12ed82f\") " Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.333806 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs" (OuterVolumeSpecName: "logs") pod "000282a3-18a6-4ea3-8055-ae4ad12ed82f" (UID: "000282a3-18a6-4ea3-8055-ae4ad12ed82f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.340844 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw" (OuterVolumeSpecName: "kube-api-access-pq7mw") pod "000282a3-18a6-4ea3-8055-ae4ad12ed82f" (UID: "000282a3-18a6-4ea3-8055-ae4ad12ed82f"). InnerVolumeSpecName "kube-api-access-pq7mw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.355823 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts" (OuterVolumeSpecName: "scripts") pod "000282a3-18a6-4ea3-8055-ae4ad12ed82f" (UID: "000282a3-18a6-4ea3-8055-ae4ad12ed82f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.368470 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "000282a3-18a6-4ea3-8055-ae4ad12ed82f" (UID: "000282a3-18a6-4ea3-8055-ae4ad12ed82f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.368889 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data" (OuterVolumeSpecName: "config-data") pod "000282a3-18a6-4ea3-8055-ae4ad12ed82f" (UID: "000282a3-18a6-4ea3-8055-ae4ad12ed82f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.429988 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000282a3-18a6-4ea3-8055-ae4ad12ed82f-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.430013 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq7mw\" (UniqueName: \"kubernetes.io/projected/000282a3-18a6-4ea3-8055-ae4ad12ed82f-kube-api-access-pq7mw\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.430023 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.430032 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.430040 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000282a3-18a6-4ea3-8055-ae4ad12ed82f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.679179 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.679398 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.679426 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.679445 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.682889 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.716918 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.717200 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.762306 4661 
generic.go:334] "Generic (PLEG): container finished" podID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerID="77474c49cc55dbbb44863893d64cbbae78b54f0faee978b299e3d5a8651499a2" exitCode=0 Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.762342 4661 generic.go:334] "Generic (PLEG): container finished" podID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerID="77e6919ae7a66f497ea77c840a4caaf333f93857d1b9052c92d6865e26c65aa0" exitCode=143 Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.762379 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerDied","Data":"77474c49cc55dbbb44863893d64cbbae78b54f0faee978b299e3d5a8651499a2"} Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.762518 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerDied","Data":"77e6919ae7a66f497ea77c840a4caaf333f93857d1b9052c92d6865e26c65aa0"} Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.764052 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-6s9q7" event={"ID":"000282a3-18a6-4ea3-8055-ae4ad12ed82f","Type":"ContainerDied","Data":"76d7a90a753d4fc15e44f8d8a7e4a847cff8df1fa3cb8e1a7521215053971136"} Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.764073 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76d7a90a753d4fc15e44f8d8a7e4a847cff8df1fa3cb8e1a7521215053971136" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.764111 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-6s9q7" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.777054 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerStarted","Data":"2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a"} Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.784842 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7677654df9-tdbxq" event={"ID":"a2287a17-7b4e-40d0-ba56-0e78abd1b1ec","Type":"ContainerStarted","Data":"36902bda1349077b316d87ccffc6d7bd461f50e1c3e24c4c9c6064c59f2c0f23"} Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.785650 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.823866 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.850206 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7677654df9-tdbxq" podStartSLOduration=2.850189387 podStartE2EDuration="2.850189387s" podCreationTimestamp="2025-10-01 05:47:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:52.830939468 +0000 UTC m=+1121.768918082" watchObservedRunningTime="2025-10-01 05:47:52.850189387 +0000 UTC m=+1121.788168001" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.907605 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.919464 4661 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/placement-558754b4b6-4khhg"] Oct 01 05:47:52 crc kubenswrapper[4661]: E1001 05:47:52.919847 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000282a3-18a6-4ea3-8055-ae4ad12ed82f" containerName="placement-db-sync" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.919859 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="000282a3-18a6-4ea3-8055-ae4ad12ed82f" containerName="placement-db-sync" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.920034 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="000282a3-18a6-4ea3-8055-ae4ad12ed82f" containerName="placement-db-sync" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.920924 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.925790 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.925885 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.926034 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bj9rg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.926135 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.926187 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938308 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-combined-ca-bundle\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938365 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-scripts\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938400 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-config-data\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938429 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j469\" (UniqueName: \"kubernetes.io/projected/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-kube-api-access-9j469\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938452 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-internal-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938489 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-logs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.938523 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-public-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:52 crc kubenswrapper[4661]: I1001 05:47:52.970135 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-558754b4b6-4khhg"] Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.040712 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-logs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.040795 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-public-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.040895 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-combined-ca-bundle\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.040923 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-scripts\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.040963 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-config-data\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.041008 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j469\" (UniqueName: \"kubernetes.io/projected/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-kube-api-access-9j469\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.041037 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-internal-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.042854 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-logs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.051110 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-internal-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.053247 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-combined-ca-bundle\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.053655 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-config-data\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.059210 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-public-tls-certs\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.060362 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-scripts\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.065022 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j469\" (UniqueName: \"kubernetes.io/projected/59481d9b-6c9f-48ac-93d1-870dbfb6edaf-kube-api-access-9j469\") pod \"placement-558754b4b6-4khhg\" (UID: \"59481d9b-6c9f-48ac-93d1-870dbfb6edaf\") " pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.259042 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.278177 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.353536 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs\") pod \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.353611 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data\") pod \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.354132 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs" (OuterVolumeSpecName: "logs") pod "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" (UID: "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.354247 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca\") pod \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.354951 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5rzf\" (UniqueName: \"kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf\") pod \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.355676 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle\") pod \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\" (UID: \"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e\") " Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.356710 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.361693 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf" (OuterVolumeSpecName: "kube-api-access-s5rzf") pod "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" (UID: "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e"). InnerVolumeSpecName "kube-api-access-s5rzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.380432 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" (UID: "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.412492 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data" (OuterVolumeSpecName: "config-data") pod "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" (UID: "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.414883 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" (UID: "68e4ec18-29f5-4e33-aff6-1460ca4b6f0e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.459067 4661 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.459115 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5rzf\" (UniqueName: \"kubernetes.io/projected/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-kube-api-access-s5rzf\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.459129 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.459144 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.804077 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.804497 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68e4ec18-29f5-4e33-aff6-1460ca4b6f0e","Type":"ContainerDied","Data":"f4e032b5b10de8de33a419a3446a6ae5ea868b661163b7f9f3d063bffeacf139"} Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.804528 4661 scope.go:117] "RemoveContainer" containerID="77474c49cc55dbbb44863893d64cbbae78b54f0faee978b299e3d5a8651499a2" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.827394 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.840183 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.852264 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:53 crc kubenswrapper[4661]: E1001 05:47:53.852712 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api-log" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.852723 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api-log" Oct 01 05:47:53 crc kubenswrapper[4661]: E1001 05:47:53.852741 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.852747 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.852950 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.852974 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" containerName="watcher-api-log" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.854380 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.872423 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.876963 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.877106 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.879611 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969269 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969367 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-logs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969421 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d7mp\" (UniqueName: \"kubernetes.io/projected/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-kube-api-access-9d7mp\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969439 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-config-data\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969605 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969721 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:53 crc kubenswrapper[4661]: I1001 05:47:53.969749 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.071509 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.071566 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.071588 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.071617 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.071655 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-logs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.072096 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-config-data\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.072121 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d7mp\" (UniqueName: \"kubernetes.io/projected/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-kube-api-access-9d7mp\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.072411 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-logs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.076183 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.082443 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.085546 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.095594 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d7mp\" (UniqueName: \"kubernetes.io/projected/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-kube-api-access-9d7mp\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.095705 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-config-data\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.096603 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0858ffab-0d1d-422b-8ac2-abeef9ab22ed-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0858ffab-0d1d-422b-8ac2-abeef9ab22ed\") " pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.188163 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Oct 01 05:47:54 crc kubenswrapper[4661]: I1001 05:47:54.809958 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-applier-0" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerName="watcher-applier" containerID="cri-o://f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215" gracePeriod=30 Oct 01 05:47:55 crc kubenswrapper[4661]: I1001 05:47:55.768367 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68e4ec18-29f5-4e33-aff6-1460ca4b6f0e" path="/var/lib/kubelet/pods/68e4ec18-29f5-4e33-aff6-1460ca4b6f0e/volumes" Oct 01 05:47:55 crc kubenswrapper[4661]: I1001 05:47:55.818674 4661 generic.go:334] "Generic (PLEG): container finished" podID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerID="2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a" exitCode=1 Oct 01 05:47:55 crc kubenswrapper[4661]: I1001 05:47:55.818715 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerDied","Data":"2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a"} Oct 01 05:47:55 crc kubenswrapper[4661]: I1001 05:47:55.819321 4661 scope.go:117] "RemoveContainer" containerID="2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a" Oct 01 05:47:55 crc kubenswrapper[4661]: E1001 05:47:55.819567 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.556034 4661 scope.go:117] "RemoveContainer" containerID="77e6919ae7a66f497ea77c840a4caaf333f93857d1b9052c92d6865e26c65aa0" Oct 01 05:47:57 crc kubenswrapper[4661]: E1001 05:47:57.682432 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = 
Oct 01 05:47:57 crc kubenswrapper[4661]: E1001 05:47:57.682926 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215 is running failed: container process not found" containerID="f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"]
Oct 01 05:47:57 crc kubenswrapper[4661]: E1001 05:47:57.683324 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215 is running failed: container process not found" containerID="f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"]
Oct 01 05:47:57 crc kubenswrapper[4661]: E1001 05:47:57.683351 4661 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerName="watcher-applier"
Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.733366 4661 scope.go:117] "RemoveContainer" containerID="db48fc7a4a6734f309e14ca7d5365567692626d11b1764b5d5ac2c1d7cd35ddf"
Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.849152 4661 generic.go:334] "Generic (PLEG): container finished" podID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerID="f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215" exitCode=0
Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.849346 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12","Type":"ContainerDied","Data":"f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215"}
Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.878493 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0"
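The repeated ExecSync failures above are the watcher-applier exec probes racing container teardown: the container was just killed with a 30s grace period, so by the time the kubelet tries to run `pgrep` inside it, CRI-O answers NotFound and the prober records "Probe errored". A sketch of how such an exec probe is declared with the Kubernetes API types, reusing the exact command from the log; this assumes `k8s.io/api/core/v1` from a recent release (v1.23+, where the handler field is named ProbeHandler), and is an illustration of the probe shape, not the watcher operator's actual manifest:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	readiness := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				// the command the kubelet was ExecSync-ing in the log:
				// succeed if a watcher-applier process is in state D, R, S, or T
				Command: []string{"/usr/bin/pgrep", "-r", "DRST", "watcher-applier"},
			},
		},
	}
	fmt.Println(readiness.Exec.Command)
}
```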
Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.943535 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.943577 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.946268 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle\") pod \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.946454 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs\") pod \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.946494 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data\") pod \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.946535 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh4ts\" (UniqueName: \"kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts\") pod \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\" (UID: \"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12\") " Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.947068 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs" (OuterVolumeSpecName: "logs") pod "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" (UID: "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.948164 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.948191 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.950316 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts" (OuterVolumeSpecName: "kube-api-access-fh4ts") pod "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" (UID: "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12"). InnerVolumeSpecName "kube-api-access-fh4ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:47:57 crc kubenswrapper[4661]: I1001 05:47:57.990762 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" (UID: "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.020371 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.029127 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.030798 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.057094 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.058875 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh4ts\" (UniqueName: \"kubernetes.io/projected/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-kube-api-access-fh4ts\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.058906 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.058916 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.075422 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data" (OuterVolumeSpecName: "config-data") pod "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" (UID: "e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.144228 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-558754b4b6-4khhg"] Oct 01 05:47:58 crc kubenswrapper[4661]: W1001 05:47:58.149451 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59481d9b_6c9f_48ac_93d1_870dbfb6edaf.slice/crio-a427fa527290128beae266b244d9f5262ac322a68142f3276fa85f5002e8747e WatchSource:0}: Error finding container a427fa527290128beae266b244d9f5262ac322a68142f3276fa85f5002e8747e: Status 404 returned error can't find the container with id a427fa527290128beae266b244d9f5262ac322a68142f3276fa85f5002e8747e Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.160828 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.274507 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Oct 01 05:47:58 crc kubenswrapper[4661]: W1001 05:47:58.283252 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0858ffab_0d1d_422b_8ac2_abeef9ab22ed.slice/crio-0ded033aaba9bdfe356681b86bc9f40499273573b33ad7323dda683ef69e0d64 WatchSource:0}: Error finding container 0ded033aaba9bdfe356681b86bc9f40499273573b33ad7323dda683ef69e0d64: Status 404 returned error can't find the container with id 0ded033aaba9bdfe356681b86bc9f40499273573b33ad7323dda683ef69e0d64 Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.862942 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"0858ffab-0d1d-422b-8ac2-abeef9ab22ed","Type":"ContainerStarted","Data":"47b9de39339d740df1a9a6d17eb3f1ee168669e2c6448e5db083f215c2b18347"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.863197 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"0858ffab-0d1d-422b-8ac2-abeef9ab22ed","Type":"ContainerStarted","Data":"39ea0a3eb4394738c5db286f94b609d0b416826c8ff5b3d4b9de20dbc1cf233a"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.863232 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.863243 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"0858ffab-0d1d-422b-8ac2-abeef9ab22ed","Type":"ContainerStarted","Data":"0ded033aaba9bdfe356681b86bc9f40499273573b33ad7323dda683ef69e0d64"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.871389 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerStarted","Data":"6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.874408 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-558754b4b6-4khhg" event={"ID":"59481d9b-6c9f-48ac-93d1-870dbfb6edaf","Type":"ContainerStarted","Data":"d221969aeb9f6efab68f0ae7433e4a91d89f860de6b6e3426026609e22191bae"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.874434 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-558754b4b6-4khhg" 
event={"ID":"59481d9b-6c9f-48ac-93d1-870dbfb6edaf","Type":"ContainerStarted","Data":"d8e789edd85d78848c0206ca1418db3d9baa87bd0a77d9f8d5915fdc3f45c8fe"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.874462 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-558754b4b6-4khhg" event={"ID":"59481d9b-6c9f-48ac-93d1-870dbfb6edaf","Type":"ContainerStarted","Data":"a427fa527290128beae266b244d9f5262ac322a68142f3276fa85f5002e8747e"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.874563 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.874597 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.876775 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-cwwsl" event={"ID":"9ae8568a-ecc3-429d-9717-0d05cf2e52d1","Type":"ContainerStarted","Data":"684d7b6c1ff2edf9b2e601cbd480ea164bcf7a129db8d29d1b4bcfa3de60612f"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879141 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12","Type":"ContainerDied","Data":"a1d7048dd092069ae85d8868a506fe36b5db28e2eac1de313effd5ef916c5c8b"} Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879173 4661 scope.go:117] "RemoveContainer" containerID="f2b5d2a8f4e0cf1118cb15a9a02e9bae2637c7d361436b6a40f7440935d6b215" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879221 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879771 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879807 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879819 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.879829 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.885467 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=5.885453921 podStartE2EDuration="5.885453921s" podCreationTimestamp="2025-10-01 05:47:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:58.88212791 +0000 UTC m=+1127.820106524" watchObservedRunningTime="2025-10-01 05:47:58.885453921 +0000 UTC m=+1127.823432535" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.924375 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-558754b4b6-4khhg" podStartSLOduration=6.92435525 podStartE2EDuration="6.92435525s" podCreationTimestamp="2025-10-01 05:47:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:47:58.912817833 +0000 UTC m=+1127.850796457" 
watchObservedRunningTime="2025-10-01 05:47:58.92435525 +0000 UTC m=+1127.862333864" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.935147 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-cwwsl" podStartSLOduration=4.061229991 podStartE2EDuration="36.935127356s" podCreationTimestamp="2025-10-01 05:47:22 +0000 UTC" firstStartedPulling="2025-10-01 05:47:24.86104358 +0000 UTC m=+1093.799022194" lastFinishedPulling="2025-10-01 05:47:57.734940945 +0000 UTC m=+1126.672919559" observedRunningTime="2025-10-01 05:47:58.927959128 +0000 UTC m=+1127.865937752" watchObservedRunningTime="2025-10-01 05:47:58.935127356 +0000 UTC m=+1127.873105970" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.957436 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.969925 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.976089 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:58 crc kubenswrapper[4661]: E1001 05:47:58.976467 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerName="watcher-applier" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.976486 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerName="watcher-applier" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.976705 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" containerName="watcher-applier" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.977267 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:58 crc kubenswrapper[4661]: I1001 05:47:58.981696 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.002415 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.178746 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.178805 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-config-data\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.178828 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ede629-3e6b-448b-be47-77ce371d40f7-logs\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.179224 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29mmr\" (UniqueName: \"kubernetes.io/projected/b6ede629-3e6b-448b-be47-77ce371d40f7-kube-api-access-29mmr\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.189211 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.280660 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.280776 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-config-data\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.280827 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ede629-3e6b-448b-be47-77ce371d40f7-logs\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.281034 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29mmr\" (UniqueName: \"kubernetes.io/projected/b6ede629-3e6b-448b-be47-77ce371d40f7-kube-api-access-29mmr\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 
crc kubenswrapper[4661]: I1001 05:47:59.288050 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ede629-3e6b-448b-be47-77ce371d40f7-logs\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.292514 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.298047 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ede629-3e6b-448b-be47-77ce371d40f7-config-data\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.299654 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29mmr\" (UniqueName: \"kubernetes.io/projected/b6ede629-3e6b-448b-be47-77ce371d40f7-kube-api-access-29mmr\") pod \"watcher-applier-0\" (UID: \"b6ede629-3e6b-448b-be47-77ce371d40f7\") " pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.318087 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.788850 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12" path="/var/lib/kubelet/pods/e1f4fe63-ef4f-4200-a8fe-bbe15b3a8b12/volumes" Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.798912 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Oct 01 05:47:59 crc kubenswrapper[4661]: I1001 05:47:59.890993 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"b6ede629-3e6b-448b-be47-77ce371d40f7","Type":"ContainerStarted","Data":"8fbbfe955678490b9255b6e280be86292f321cfa8311b627c726a5056d7e8692"} Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.899265 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.904359 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.905570 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"b6ede629-3e6b-448b-be47-77ce371d40f7","Type":"ContainerStarted","Data":"a73271320ab42aec05457cea9dd34815360ecff2f02d709b7a598152ae45b527"} Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.905637 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.906093 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.906107 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:48:00 crc kubenswrapper[4661]: I1001 05:48:00.910128 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:01 crc 
kubenswrapper[4661]: I1001 05:48:01.072941 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 05:48:01 crc kubenswrapper[4661]: I1001 05:48:01.073561 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 05:48:01 crc kubenswrapper[4661]: I1001 05:48:01.099890 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=3.099867662 podStartE2EDuration="3.099867662s" podCreationTimestamp="2025-10-01 05:47:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:00.975945998 +0000 UTC m=+1129.913924632" watchObservedRunningTime="2025-10-01 05:48:01.099867662 +0000 UTC m=+1130.037846276" Oct 01 05:48:01 crc kubenswrapper[4661]: I1001 05:48:01.206324 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Oct 01 05:48:02 crc kubenswrapper[4661]: I1001 05:48:02.680370 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:02 crc kubenswrapper[4661]: I1001 05:48:02.680774 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:02 crc kubenswrapper[4661]: I1001 05:48:02.681454 4661 scope.go:117] "RemoveContainer" containerID="2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a" Oct 01 05:48:02 crc kubenswrapper[4661]: E1001 05:48:02.681738 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:03 crc kubenswrapper[4661]: I1001 05:48:03.491364 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:48:03 crc kubenswrapper[4661]: I1001 05:48:03.535310 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:48:03 crc kubenswrapper[4661]: I1001 05:48:03.951719 4661 generic.go:334] "Generic (PLEG): container finished" podID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" containerID="684d7b6c1ff2edf9b2e601cbd480ea164bcf7a129db8d29d1b4bcfa3de60612f" exitCode=0 Oct 01 05:48:03 crc kubenswrapper[4661]: I1001 05:48:03.951767 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-cwwsl" event={"ID":"9ae8568a-ecc3-429d-9717-0d05cf2e52d1","Type":"ContainerDied","Data":"684d7b6c1ff2edf9b2e601cbd480ea164bcf7a129db8d29d1b4bcfa3de60612f"} Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.189317 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.198370 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.309917 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.310045 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.310122 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.311429 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.311546 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f" gracePeriod=600 Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.319138 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.977560 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f" exitCode=0 Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.978949 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f"} Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.978983 4661 scope.go:117] "RemoveContainer" containerID="44db5ad49645582db557448c24b5aa4a1f97fa89eaf93393d5999ec82b3f1a5e" Oct 01 05:48:04 crc kubenswrapper[4661]: I1001 05:48:04.985707 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Oct 01 05:48:05 crc kubenswrapper[4661]: I1001 05:48:05.037409 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:48:05 crc kubenswrapper[4661]: I1001 05:48:05.234272 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-77d96d88fb-5fr24" Oct 01 05:48:05 crc kubenswrapper[4661]: I1001 05:48:05.288094 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:48:05 crc kubenswrapper[4661]: I1001 05:48:05.987781 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8674487c84-nz4kb" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon-log" containerID="cri-o://75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2" gracePeriod=30 Oct 01 05:48:05 crc 
kubenswrapper[4661]: I1001 05:48:05.987898 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8674487c84-nz4kb" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" containerID="cri-o://807a61224a2c0f3f2b67c701b6c1327b09ad43c8eca957f3fda17dc18a7d7a29" gracePeriod=30 Oct 01 05:48:07 crc kubenswrapper[4661]: I1001 05:48:07.000718 4661 generic.go:334] "Generic (PLEG): container finished" podID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerID="807a61224a2c0f3f2b67c701b6c1327b09ad43c8eca957f3fda17dc18a7d7a29" exitCode=0 Oct 01 05:48:07 crc kubenswrapper[4661]: I1001 05:48:07.000787 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerDied","Data":"807a61224a2c0f3f2b67c701b6c1327b09ad43c8eca957f3fda17dc18a7d7a29"} Oct 01 05:48:09 crc kubenswrapper[4661]: I1001 05:48:09.318959 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Oct 01 05:48:09 crc kubenswrapper[4661]: I1001 05:48:09.362334 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Oct 01 05:48:10 crc kubenswrapper[4661]: I1001 05:48:10.030187 4661 generic.go:334] "Generic (PLEG): container finished" podID="e8fdca12-5e6d-43d7-ae59-33b3a388ada4" containerID="14d34d13c845ef5fc2830f419636aab1ec2428e0e484e69af9db6473be13355a" exitCode=0 Oct 01 05:48:10 crc kubenswrapper[4661]: I1001 05:48:10.030254 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bcc6q" event={"ID":"e8fdca12-5e6d-43d7-ae59-33b3a388ada4","Type":"ContainerDied","Data":"14d34d13c845ef5fc2830f419636aab1ec2428e0e484e69af9db6473be13355a"} Oct 01 05:48:10 crc kubenswrapper[4661]: I1001 05:48:10.080353 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Oct 01 05:48:11 crc kubenswrapper[4661]: I1001 05:48:11.720723 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8674487c84-nz4kb" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.163:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.163:8443: connect: connection refused" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.011998 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.084723 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-cwwsl" event={"ID":"9ae8568a-ecc3-429d-9717-0d05cf2e52d1","Type":"ContainerDied","Data":"6a8d034a0bf1ef13c95a855b156085db6bc6e02e87fd11cd5d7b8a83b6cda3d7"} Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.084806 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a8d034a0bf1ef13c95a855b156085db6bc6e02e87fd11cd5d7b8a83b6cda3d7" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.085014 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-cwwsl" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.152331 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data\") pod \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.152405 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkw8h\" (UniqueName: \"kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h\") pod \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.152474 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle\") pod \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\" (UID: \"9ae8568a-ecc3-429d-9717-0d05cf2e52d1\") " Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.159706 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h" (OuterVolumeSpecName: "kube-api-access-nkw8h") pod "9ae8568a-ecc3-429d-9717-0d05cf2e52d1" (UID: "9ae8568a-ecc3-429d-9717-0d05cf2e52d1"). InnerVolumeSpecName "kube-api-access-nkw8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.162322 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9ae8568a-ecc3-429d-9717-0d05cf2e52d1" (UID: "9ae8568a-ecc3-429d-9717-0d05cf2e52d1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.184000 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ae8568a-ecc3-429d-9717-0d05cf2e52d1" (UID: "9ae8568a-ecc3-429d-9717-0d05cf2e52d1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.254437 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkw8h\" (UniqueName: \"kubernetes.io/projected/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-kube-api-access-nkw8h\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.254468 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:13 crc kubenswrapper[4661]: I1001 05:48:13.254477 4661 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9ae8568a-ecc3-429d-9717-0d05cf2e52d1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.103302 4661 generic.go:334] "Generic (PLEG): container finished" podID="60bcbef6-72b1-41e7-9871-ad2945197629" containerID="059af4a30fb40be11cf8bb11dd6e682155eb3dfaa28138617d9dad0f19f4dcb8" exitCode=137 Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.103345 4661 generic.go:334] "Generic (PLEG): container finished" podID="60bcbef6-72b1-41e7-9871-ad2945197629" containerID="6a1a61f351ed2ff739c016048c32a4ae7829bfa4bf4cc9c034ded7e4641df5e3" exitCode=137 Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.103350 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerDied","Data":"059af4a30fb40be11cf8bb11dd6e682155eb3dfaa28138617d9dad0f19f4dcb8"} Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.103399 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerDied","Data":"6a1a61f351ed2ff739c016048c32a4ae7829bfa4bf4cc9c034ded7e4641df5e3"} Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.106922 4661 generic.go:334] "Generic (PLEG): container finished" podID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerID="8870bddbd2e571f692a7e39886c80a605a6cc46613cec55b72b9effeee488e1c" exitCode=137 Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.106955 4661 generic.go:334] "Generic (PLEG): container finished" podID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerID="4a8cad912110e4bd4408804ab26bfbb9283cbda46a2fb40490eaabc1759c5958" exitCode=137 Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.106974 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerDied","Data":"8870bddbd2e571f692a7e39886c80a605a6cc46613cec55b72b9effeee488e1c"} Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.107037 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerDied","Data":"4a8cad912110e4bd4408804ab26bfbb9283cbda46a2fb40490eaabc1759c5958"} Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.186711 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current" Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.187255 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: 
code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current" Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.187430 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dkd2h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-q2jck_openstack(77022b65-9c8f-4173-957d-0d0e457bd838): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.192739 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-q2jck" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.283164 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.367430 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-9964d7f68-mbqp4"] Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.367964 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" containerName="barbican-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.367976 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" containerName="barbican-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: E1001 05:48:14.368021 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fdca12-5e6d-43d7-ae59-33b3a388ada4" containerName="neutron-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.368029 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fdca12-5e6d-43d7-ae59-33b3a388ada4" containerName="neutron-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.368207 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8fdca12-5e6d-43d7-ae59-33b3a388ada4" containerName="neutron-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.368224 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" containerName="barbican-db-sync" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.369247 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.371972 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hks7z" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.376216 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.376544 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.378179 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config\") pod \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.378505 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8vtr\" (UniqueName: \"kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr\") pod \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.378597 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle\") pod \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\" (UID: \"e8fdca12-5e6d-43d7-ae59-33b3a388ada4\") " Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.401249 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6f9cbcb89-ndbgc"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.415751 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.422540 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr" (OuterVolumeSpecName: "kube-api-access-r8vtr") pod "e8fdca12-5e6d-43d7-ae59-33b3a388ada4" (UID: "e8fdca12-5e6d-43d7-ae59-33b3a388ada4"). InnerVolumeSpecName "kube-api-access-r8vtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.422894 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.438290 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-9964d7f68-mbqp4"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.448055 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8fdca12-5e6d-43d7-ae59-33b3a388ada4" (UID: "e8fdca12-5e6d-43d7-ae59-33b3a388ada4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.452132 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config" (OuterVolumeSpecName: "config") pod "e8fdca12-5e6d-43d7-ae59-33b3a388ada4" (UID: "e8fdca12-5e6d-43d7-ae59-33b3a388ada4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.468608 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6f9cbcb89-ndbgc"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.475536 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.477365 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483235 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwqqm\" (UniqueName: \"kubernetes.io/projected/e5417b7f-b126-4335-ac74-b5d8f5713aee-kube-api-access-qwqqm\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483450 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7097203a-fb10-4615-9115-97d10c5b114d-logs\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d9n7\" (UniqueName: \"kubernetes.io/projected/7097203a-fb10-4615-9115-97d10c5b114d-kube-api-access-5d9n7\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483529 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483554 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data-custom\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483581 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-combined-ca-bundle\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483612 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5417b7f-b126-4335-ac74-b5d8f5713aee-logs\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483650 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483719 4661 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-combined-ca-bundle\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483877 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data-custom\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483937 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8vtr\" (UniqueName: \"kubernetes.io/projected/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-kube-api-access-r8vtr\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483949 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.483959 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8fdca12-5e6d-43d7-ae59-33b3a388ada4-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.503457 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.559196 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.560896 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.567441 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.583140 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.586472 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data-custom\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.586522 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-combined-ca-bundle\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.586575 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5417b7f-b126-4335-ac74-b5d8f5713aee-logs\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.586723 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.586840 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-combined-ca-bundle\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587548 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587580 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587609 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data-custom\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: 
\"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587679 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587212 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5417b7f-b126-4335-ac74-b5d8f5713aee-logs\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587887 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.587927 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwqqm\" (UniqueName: \"kubernetes.io/projected/e5417b7f-b126-4335-ac74-b5d8f5713aee-kube-api-access-qwqqm\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588016 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7097203a-fb10-4615-9115-97d10c5b114d-logs\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588095 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d9n7\" (UniqueName: \"kubernetes.io/projected/7097203a-fb10-4615-9115-97d10c5b114d-kube-api-access-5d9n7\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588146 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588175 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvrlv\" (UniqueName: \"kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588392 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.588764 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7097203a-fb10-4615-9115-97d10c5b114d-logs\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.589601 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data-custom\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.591395 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-combined-ca-bundle\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.592394 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-config-data\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.592753 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5417b7f-b126-4335-ac74-b5d8f5713aee-combined-ca-bundle\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.593658 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.593928 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7097203a-fb10-4615-9115-97d10c5b114d-config-data-custom\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.602435 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d9n7\" (UniqueName: \"kubernetes.io/projected/7097203a-fb10-4615-9115-97d10c5b114d-kube-api-access-5d9n7\") pod \"barbican-worker-6f9cbcb89-ndbgc\" (UID: \"7097203a-fb10-4615-9115-97d10c5b114d\") " pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.604208 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwqqm\" 
(UniqueName: \"kubernetes.io/projected/e5417b7f-b126-4335-ac74-b5d8f5713aee-kube-api-access-qwqqm\") pod \"barbican-keystone-listener-9964d7f68-mbqp4\" (UID: \"e5417b7f-b126-4335-ac74-b5d8f5713aee\") " pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689530 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-combined-ca-bundle\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689582 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689646 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9bvr\" (UniqueName: \"kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689667 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689687 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvrlv\" (UniqueName: \"kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689717 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689743 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689792 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689812 4661 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689844 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.689876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.690691 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.690926 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.691333 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.691540 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.691692 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.710226 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvrlv\" (UniqueName: \"kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv\") pod \"dnsmasq-dns-549c96b4c7-422x9\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.791642 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.791994 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.792026 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.792049 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-combined-ca-bundle\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.792116 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9bvr\" (UniqueName: \"kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.792153 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.796474 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.796512 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.796798 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-combined-ca-bundle\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.810413 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9bvr\" (UniqueName: 
\"kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr\") pod \"barbican-api-68c4554784-zv8tz\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.816769 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.846485 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.851442 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:14 crc kubenswrapper[4661]: I1001 05:48:14.880082 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.121397 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bcc6q" event={"ID":"e8fdca12-5e6d-43d7-ae59-33b3a388ada4","Type":"ContainerDied","Data":"7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9"} Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.121427 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bcc6q" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.121457 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9" Oct 01 05:48:15 crc kubenswrapper[4661]: E1001 05:48:15.124772 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current\\\"\"" pod="openstack/cinder-db-sync-q2jck" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.582334 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.598963 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.601437 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.646590 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.714993 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.715044 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4nxh\" (UniqueName: \"kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.715070 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.728756 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.729025 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.729191 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.747311 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"] Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.752319 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.756436 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2qc29" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.756690 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.758865 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.759285 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.784597 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"] Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831069 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831116 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831172 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831207 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831226 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4nxh\" (UniqueName: \"kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831248 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831267 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: 
\"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831322 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831354 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831405 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.831449 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2rrn\" (UniqueName: \"kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.832344 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.832415 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.832795 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.832789 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.832891 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " 
pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.851816 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4nxh\" (UniqueName: \"kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh\") pod \"dnsmasq-dns-84c68846bf-rr6w9\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.933548 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.933625 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.933673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2rrn\" (UniqueName: \"kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.933702 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.933772 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.936719 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.937091 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.937222 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.940321 4661 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.950258 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2rrn\" (UniqueName: \"kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn\") pod \"neutron-5cdff47b98-krjm2\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:15 crc kubenswrapper[4661]: I1001 05:48:15.959747 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.083133 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:16 crc kubenswrapper[4661]: E1001 05:48:16.278350 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48" Oct 01 05:48:16 crc kubenswrapper[4661]: E1001 05:48:16.278508 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8cvpq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(a7c73747-7ab3-4328-bec7-7708a39a50a2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 05:48:16 crc kubenswrapper[4661]: E1001 05:48:16.280018 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.547210 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.573553 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.647856 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjdvn\" (UniqueName: \"kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn\") pod \"60bcbef6-72b1-41e7-9871-ad2945197629\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.647971 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs\") pod \"337f7c79-e3bf-49ef-b783-9ac03df52fac\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.648431 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs" (OuterVolumeSpecName: "logs") pod "337f7c79-e3bf-49ef-b783-9ac03df52fac" (UID: "337f7c79-e3bf-49ef-b783-9ac03df52fac"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.648666 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key\") pod \"60bcbef6-72b1-41e7-9871-ad2945197629\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.648697 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs\") pod \"60bcbef6-72b1-41e7-9871-ad2945197629\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649151 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs" (OuterVolumeSpecName: "logs") pod "60bcbef6-72b1-41e7-9871-ad2945197629" (UID: "60bcbef6-72b1-41e7-9871-ad2945197629"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649233 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data\") pod \"337f7c79-e3bf-49ef-b783-9ac03df52fac\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649312 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts\") pod \"337f7c79-e3bf-49ef-b783-9ac03df52fac\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649382 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmd2b\" (UniqueName: \"kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b\") pod \"337f7c79-e3bf-49ef-b783-9ac03df52fac\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649466 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key\") pod \"337f7c79-e3bf-49ef-b783-9ac03df52fac\" (UID: \"337f7c79-e3bf-49ef-b783-9ac03df52fac\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649530 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts\") pod \"60bcbef6-72b1-41e7-9871-ad2945197629\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.649551 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data\") pod \"60bcbef6-72b1-41e7-9871-ad2945197629\" (UID: \"60bcbef6-72b1-41e7-9871-ad2945197629\") " Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.650081 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/337f7c79-e3bf-49ef-b783-9ac03df52fac-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: 
I1001 05:48:16.650098 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60bcbef6-72b1-41e7-9871-ad2945197629-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.653365 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "60bcbef6-72b1-41e7-9871-ad2945197629" (UID: "60bcbef6-72b1-41e7-9871-ad2945197629"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.655970 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b" (OuterVolumeSpecName: "kube-api-access-vmd2b") pod "337f7c79-e3bf-49ef-b783-9ac03df52fac" (UID: "337f7c79-e3bf-49ef-b783-9ac03df52fac"). InnerVolumeSpecName "kube-api-access-vmd2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.656714 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "337f7c79-e3bf-49ef-b783-9ac03df52fac" (UID: "337f7c79-e3bf-49ef-b783-9ac03df52fac"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.656835 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn" (OuterVolumeSpecName: "kube-api-access-jjdvn") pod "60bcbef6-72b1-41e7-9871-ad2945197629" (UID: "60bcbef6-72b1-41e7-9871-ad2945197629"). InnerVolumeSpecName "kube-api-access-jjdvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.674510 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data" (OuterVolumeSpecName: "config-data") pod "60bcbef6-72b1-41e7-9871-ad2945197629" (UID: "60bcbef6-72b1-41e7-9871-ad2945197629"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.675510 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts" (OuterVolumeSpecName: "scripts") pod "337f7c79-e3bf-49ef-b783-9ac03df52fac" (UID: "337f7c79-e3bf-49ef-b783-9ac03df52fac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.681146 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts" (OuterVolumeSpecName: "scripts") pod "60bcbef6-72b1-41e7-9871-ad2945197629" (UID: "60bcbef6-72b1-41e7-9871-ad2945197629"). InnerVolumeSpecName "scripts". 
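[Annotation] The horizon teardown above follows the reconciler's fixed sequence: "UnmountVolume started" (the volume is still in the actual state of the world but no longer in the desired state), "UnmountVolume.TearDown succeeded", then "Volume detached". A minimal model of that diff loop, deliberately reduced from what reconciler_common.go actually does; the UniqueNames are abbreviated from the entries above.

package main

import "fmt"

func main() {
	// Desired state: both horizon pods were deleted, so nothing is desired.
	desired := map[string]bool{}
	// Actual state: volumes still mounted, keyed by (abbreviated) UniqueName.
	actual := map[string]bool{
		"empty-dir/337f7c79-logs":            true,
		"secret/60bcbef6-horizon-secret-key": true,
		"configmap/337f7c79-scripts":         true,
	}
	for uniqueName := range actual {
		if !desired[uniqueName] {
			fmt.Println("UnmountVolume started for", uniqueName)
			delete(actual, uniqueName) // stands in for TearDown succeeding
			fmt.Println("Volume detached for", uniqueName)
		}
	}
}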
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.686155 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data" (OuterVolumeSpecName: "config-data") pod "337f7c79-e3bf-49ef-b783-9ac03df52fac" (UID: "337f7c79-e3bf-49ef-b783-9ac03df52fac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752427 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752498 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/60bcbef6-72b1-41e7-9871-ad2945197629-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752522 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjdvn\" (UniqueName: \"kubernetes.io/projected/60bcbef6-72b1-41e7-9871-ad2945197629-kube-api-access-jjdvn\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752545 4661 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/60bcbef6-72b1-41e7-9871-ad2945197629-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752564 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752585 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/337f7c79-e3bf-49ef-b783-9ac03df52fac-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752603 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmd2b\" (UniqueName: \"kubernetes.io/projected/337f7c79-e3bf-49ef-b783-9ac03df52fac-kube-api-access-vmd2b\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.752623 4661 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/337f7c79-e3bf-49ef-b783-9ac03df52fac-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.757178 4661 scope.go:117] "RemoveContainer" containerID="2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a" Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.771362 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.978748 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-9964d7f68-mbqp4"] Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.986586 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6f9cbcb89-ndbgc"] Oct 01 05:48:16 crc kubenswrapper[4661]: I1001 05:48:16.996623 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.177301 4661 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.188742 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fbf467685-6nvqq" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.188734 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fbf467685-6nvqq" event={"ID":"60bcbef6-72b1-41e7-9871-ad2945197629","Type":"ContainerDied","Data":"a968d9559ab95035b58d87618b755e39f127cf4619d342515817c08e242950a1"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.188904 4661 scope.go:117] "RemoveContainer" containerID="059af4a30fb40be11cf8bb11dd6e682155eb3dfaa28138617d9dad0f19f4dcb8" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.191984 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerStarted","Data":"c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.192023 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerStarted","Data":"aa259baa7383c3508ec8862fe9654b62e5f9f12603abba412979cafb6e1b08f5"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.194536 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerStarted","Data":"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.200859 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.213754 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" event={"ID":"e88bfd3d-d4f9-4804-9dde-d83ec857788d","Type":"ContainerStarted","Data":"c5c179b7082dbf1d544dc34f769214c25ef1c54a98d5e0f1e4da55fb948a2288"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.218114 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" event={"ID":"e5417b7f-b126-4335-ac74-b5d8f5713aee","Type":"ContainerStarted","Data":"15bd8612a8631e132a151855e0bd83160b58bf3f921036dba11ca832edc41619"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.233374 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5ff7f46dd9-gj8q6" event={"ID":"337f7c79-e3bf-49ef-b783-9ac03df52fac","Type":"ContainerDied","Data":"965728f85c5dc8ebd0fa1b91dad06c83bdf64c4eae759009f29413a2a663e5d2"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.233451 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5ff7f46dd9-gj8q6" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.233871 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.236849 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" event={"ID":"7097203a-fb10-4615-9115-97d10c5b114d","Type":"ContainerStarted","Data":"a3dbcf471edbe476d6a8e20d372f4ffd33785c8ef2e85fa5271c2895bee3dfb2"} Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.237028 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-central-agent" containerID="cri-o://6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680" gracePeriod=30 Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.237086 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="sg-core" containerID="cri-o://6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259" gracePeriod=30 Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.237060 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-notification-agent" containerID="cri-o://2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68" gracePeriod=30 Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.430753 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.459513 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5ff7f46dd9-gj8q6"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.467135 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.475962 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7fbf467685-6nvqq"] Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.593704 4661 scope.go:117] "RemoveContainer" containerID="6a1a61f351ed2ff739c016048c32a4ae7829bfa4bf4cc9c034ded7e4641df5e3" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.770510 4661 scope.go:117] "RemoveContainer" containerID="8870bddbd2e571f692a7e39886c80a605a6cc46613cec55b72b9effeee488e1c" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.829827 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" path="/var/lib/kubelet/pods/337f7c79-e3bf-49ef-b783-9ac03df52fac/volumes" Oct 01 05:48:17 crc kubenswrapper[4661]: I1001 05:48:17.832051 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" path="/var/lib/kubelet/pods/60bcbef6-72b1-41e7-9871-ad2945197629/volumes" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:17.999456 4661 scope.go:117] "RemoveContainer" containerID="4a8cad912110e4bd4408804ab26bfbb9283cbda46a2fb40490eaabc1759c5958" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.254705 4661 generic.go:334] "Generic (PLEG): container finished" podID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerID="6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259" exitCode=2 Oct 01 05:48:18 crc 
kubenswrapper[4661]: I1001 05:48:18.254738 4661 generic.go:334] "Generic (PLEG): container finished" podID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerID="6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680" exitCode=0 Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.255349 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerDied","Data":"6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.255381 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerDied","Data":"6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.261002 4661 generic.go:334] "Generic (PLEG): container finished" podID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerID="b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571" exitCode=0 Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.261148 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" event={"ID":"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb","Type":"ContainerDied","Data":"b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.261168 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" event={"ID":"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb","Type":"ContainerStarted","Data":"d907e361b5510b0131635085b8665463bc9ee969920335c30278d97929311611"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.264804 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerStarted","Data":"9b77deecfa6dc37544873cfdd0e2391501b58c93c3b1e18a76ce1248f74e3679"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.264852 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerStarted","Data":"0d310a0ac536ef88c6129c7c3dec41ff17d6c4732227fd4a212643d42a37eec7"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.264861 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerStarted","Data":"035798de8b667530a600cf2be0db57721609b5ef28a3404f90ddc6a9f4b04e0e"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.267133 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.275292 4661 generic.go:334] "Generic (PLEG): container finished" podID="e88bfd3d-d4f9-4804-9dde-d83ec857788d" containerID="b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e" exitCode=0 Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.275336 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" event={"ID":"e88bfd3d-d4f9-4804-9dde-d83ec857788d","Type":"ContainerDied","Data":"b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.324501 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/neutron-5cdff47b98-krjm2" podStartSLOduration=3.324481718 podStartE2EDuration="3.324481718s" podCreationTimestamp="2025-10-01 05:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:18.307264865 +0000 UTC m=+1147.245243479" watchObservedRunningTime="2025-10-01 05:48:18.324481718 +0000 UTC m=+1147.262460332" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.330389 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerStarted","Data":"3ec781b6fa67c1acefc5cff6ef4ea62b86f3a091fe3e332033e8427c084f388f"} Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.330450 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.330478 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:18 crc kubenswrapper[4661]: I1001 05:48:18.400070 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68c4554784-zv8tz" podStartSLOduration=4.400049994 podStartE2EDuration="4.400049994s" podCreationTimestamp="2025-10-01 05:48:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:18.395264482 +0000 UTC m=+1147.333243096" watchObservedRunningTime="2025-10-01 05:48:18.400049994 +0000 UTC m=+1147.338028608" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.197330 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.321833 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.321887 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.321916 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvrlv\" (UniqueName: \"kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.322001 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.322041 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.322104 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc\") pod \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\" (UID: \"e88bfd3d-d4f9-4804-9dde-d83ec857788d\") " Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.332527 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv" (OuterVolumeSpecName: "kube-api-access-kvrlv") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "kube-api-access-kvrlv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.354830 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-c77567f6f-m5g68"] Oct 01 05:48:19 crc kubenswrapper[4661]: E1001 05:48:19.355209 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355220 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: E1001 05:48:19.355242 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355248 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: E1001 05:48:19.355260 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355267 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: E1001 05:48:19.355279 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88bfd3d-d4f9-4804-9dde-d83ec857788d" containerName="init" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355284 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88bfd3d-d4f9-4804-9dde-d83ec857788d" containerName="init" Oct 01 05:48:19 crc kubenswrapper[4661]: E1001 05:48:19.355299 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355304 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355469 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355485 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e88bfd3d-d4f9-4804-9dde-d83ec857788d" containerName="init" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355503 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355509 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="60bcbef6-72b1-41e7-9871-ad2945197629" containerName="horizon" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.355527 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="337f7c79-e3bf-49ef-b783-9ac03df52fac" containerName="horizon-log" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.356478 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.368798 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.369115 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.378156 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c77567f6f-m5g68"] Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.381041 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.390986 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.392144 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.399953 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" event={"ID":"e88bfd3d-d4f9-4804-9dde-d83ec857788d","Type":"ContainerDied","Data":"c5c179b7082dbf1d544dc34f769214c25ef1c54a98d5e0f1e4da55fb948a2288"} Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.400091 4661 scope.go:117] "RemoveContainer" containerID="b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.400279 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-549c96b4c7-422x9" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.406256 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.414235 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config" (OuterVolumeSpecName: "config") pod "e88bfd3d-d4f9-4804-9dde-d83ec857788d" (UID: "e88bfd3d-d4f9-4804-9dde-d83ec857788d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.424275 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.424302 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.424312 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvrlv\" (UniqueName: \"kubernetes.io/projected/e88bfd3d-d4f9-4804-9dde-d83ec857788d-kube-api-access-kvrlv\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.425009 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.425025 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.425036 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e88bfd3d-d4f9-4804-9dde-d83ec857788d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.526347 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc9f2\" (UniqueName: \"kubernetes.io/projected/35deb613-6735-4de6-ab11-50138ce73e30-kube-api-access-pc9f2\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.526418 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-internal-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.526984 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-combined-ca-bundle\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.527116 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-ovndb-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.527495 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-public-tls-certs\") pod 
\"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.527657 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-httpd-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.527683 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.629509 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-internal-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.629935 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-combined-ca-bundle\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.629971 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-ovndb-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.630001 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-public-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.630128 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-httpd-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.630155 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.630210 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc9f2\" (UniqueName: \"kubernetes.io/projected/35deb613-6735-4de6-ab11-50138ce73e30-kube-api-access-pc9f2\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 
crc kubenswrapper[4661]: I1001 05:48:19.634462 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-public-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.638824 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-internal-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.639115 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-combined-ca-bundle\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.639255 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-httpd-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.642438 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-ovndb-tls-certs\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.645180 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/35deb613-6735-4de6-ab11-50138ce73e30-config\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.651250 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc9f2\" (UniqueName: \"kubernetes.io/projected/35deb613-6735-4de6-ab11-50138ce73e30-kube-api-access-pc9f2\") pod \"neutron-c77567f6f-m5g68\" (UID: \"35deb613-6735-4de6-ab11-50138ce73e30\") " pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.814725 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.841890 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:19 crc kubenswrapper[4661]: I1001 05:48:19.841924 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-549c96b4c7-422x9"] Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.439148 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c77567f6f-m5g68"] Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.447502 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" event={"ID":"e5417b7f-b126-4335-ac74-b5d8f5713aee","Type":"ContainerStarted","Data":"62a9ec21242df9473dab00160b5a420b89a9278634121c2070c90c3af13b0cf9"} Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.447560 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" event={"ID":"e5417b7f-b126-4335-ac74-b5d8f5713aee","Type":"ContainerStarted","Data":"036e4f19f7e361688eae3a2457faeab8a5382569e0edf5f3d75e6af151c2fe7a"} Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.464182 4661 generic.go:334] "Generic (PLEG): container finished" podID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerID="2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68" exitCode=0 Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.464281 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerDied","Data":"2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68"} Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.473143 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" event={"ID":"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb","Type":"ContainerStarted","Data":"b34bfd783b18cf29a24a92500d0351aaacd9eac86fcf98629b596e31b9f4b411"} Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.473399 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.481205 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-9964d7f68-mbqp4" podStartSLOduration=4.267481482 podStartE2EDuration="6.481157003s" podCreationTimestamp="2025-10-01 05:48:14 +0000 UTC" firstStartedPulling="2025-10-01 05:48:16.990256446 +0000 UTC m=+1145.928235060" lastFinishedPulling="2025-10-01 05:48:19.203931967 +0000 UTC m=+1148.141910581" observedRunningTime="2025-10-01 05:48:20.468837646 +0000 UTC m=+1149.406816260" watchObservedRunningTime="2025-10-01 05:48:20.481157003 +0000 UTC m=+1149.419135617" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.486844 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" event={"ID":"7097203a-fb10-4615-9115-97d10c5b114d","Type":"ContainerStarted","Data":"a602f41646db06708ad4988d5ca644896948968faf7f545a2846f2914cda93f9"} Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.486878 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" event={"ID":"7097203a-fb10-4615-9115-97d10c5b114d","Type":"ContainerStarted","Data":"5ffcd9148bee3a495e4676d007e57cc7b84b49f3c619b0ba185c74908d46d16f"} Oct 01 05:48:20 crc 
kubenswrapper[4661]: I1001 05:48:20.519210 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" podStartSLOduration=5.519192469 podStartE2EDuration="5.519192469s" podCreationTimestamp="2025-10-01 05:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:20.491453817 +0000 UTC m=+1149.429432431" watchObservedRunningTime="2025-10-01 05:48:20.519192469 +0000 UTC m=+1149.457171083" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.580463 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6f9cbcb89-ndbgc" podStartSLOduration=4.361152264 podStartE2EDuration="6.58044213s" podCreationTimestamp="2025-10-01 05:48:14 +0000 UTC" firstStartedPulling="2025-10-01 05:48:16.990116242 +0000 UTC m=+1145.928094856" lastFinishedPulling="2025-10-01 05:48:19.209406118 +0000 UTC m=+1148.147384722" observedRunningTime="2025-10-01 05:48:20.533926233 +0000 UTC m=+1149.471904867" watchObservedRunningTime="2025-10-01 05:48:20.58044213 +0000 UTC m=+1149.518420744" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.689132 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872209 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872529 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872547 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872584 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872653 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872706 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.872765 4661 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-8cvpq\" (UniqueName: \"kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq\") pod \"a7c73747-7ab3-4328-bec7-7708a39a50a2\" (UID: \"a7c73747-7ab3-4328-bec7-7708a39a50a2\") " Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.874072 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.874266 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.877485 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq" (OuterVolumeSpecName: "kube-api-access-8cvpq") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "kube-api-access-8cvpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.877959 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts" (OuterVolumeSpecName: "scripts") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.911778 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.935353 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.954807 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data" (OuterVolumeSpecName: "config-data") pod "a7c73747-7ab3-4328-bec7-7708a39a50a2" (UID: "a7c73747-7ab3-4328-bec7-7708a39a50a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975137 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975167 4661 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975176 4661 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c73747-7ab3-4328-bec7-7708a39a50a2-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975186 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975194 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975202 4661 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c73747-7ab3-4328-bec7-7708a39a50a2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:20 crc kubenswrapper[4661]: I1001 05:48:20.975209 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cvpq\" (UniqueName: \"kubernetes.io/projected/a7c73747-7ab3-4328-bec7-7708a39a50a2-kube-api-access-8cvpq\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.098982 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54f94df7b6-mhnj2"] Oct 01 05:48:21 crc kubenswrapper[4661]: E1001 05:48:21.099611 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="sg-core" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099639 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="sg-core" Oct 01 05:48:21 crc kubenswrapper[4661]: E1001 05:48:21.099661 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-central-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099668 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-central-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: E1001 05:48:21.099678 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-notification-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099684 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-notification-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099848 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="sg-core" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099865 4661 
memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-notification-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.099878 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" containerName="ceilometer-central-agent" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.100833 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.114310 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54f94df7b6-mhnj2"] Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.114565 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.114749 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280357 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280397 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data-custom\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280447 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-combined-ca-bundle\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280528 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70d1ade0-7d6b-4c94-a376-ef7027a47a76-logs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280568 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jkpx\" (UniqueName: \"kubernetes.io/projected/70d1ade0-7d6b-4c94-a376-ef7027a47a76-kube-api-access-4jkpx\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.280597 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-internal-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: 
I1001 05:48:21.280619 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-public-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-internal-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382168 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-public-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382213 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382228 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data-custom\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382276 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-combined-ca-bundle\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382313 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70d1ade0-7d6b-4c94-a376-ef7027a47a76-logs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.382350 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jkpx\" (UniqueName: \"kubernetes.io/projected/70d1ade0-7d6b-4c94-a376-ef7027a47a76-kube-api-access-4jkpx\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.385012 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70d1ade0-7d6b-4c94-a376-ef7027a47a76-logs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.391086 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-public-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.391404 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-internal-tls-certs\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.391611 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.391806 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-config-data-custom\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.395031 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70d1ade0-7d6b-4c94-a376-ef7027a47a76-combined-ca-bundle\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.418768 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jkpx\" (UniqueName: \"kubernetes.io/projected/70d1ade0-7d6b-4c94-a376-ef7027a47a76-kube-api-access-4jkpx\") pod \"barbican-api-54f94df7b6-mhnj2\" (UID: \"70d1ade0-7d6b-4c94-a376-ef7027a47a76\") " pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.506785 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c77567f6f-m5g68" event={"ID":"35deb613-6735-4de6-ab11-50138ce73e30","Type":"ContainerStarted","Data":"77dbec4315e10f455e787bb4390fd1c1a07a25199bc5227511d00f6171f40ab3"} Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.507065 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c77567f6f-m5g68" event={"ID":"35deb613-6735-4de6-ab11-50138ce73e30","Type":"ContainerStarted","Data":"d69ea517b32c3efbb41b9c3cfa622ac403f18ce947b319c204248411125f8453"} Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.507079 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c77567f6f-m5g68" event={"ID":"35deb613-6735-4de6-ab11-50138ce73e30","Type":"ContainerStarted","Data":"2307e9ddb8ca1a4d2e5150fc411bd5f752f37833b0b5b2f37308fd0151400dfb"} Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.507116 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-c77567f6f-m5g68" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.509203 4661 generic.go:334] "Generic (PLEG): container finished" podID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" 
containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" exitCode=1 Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.509243 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerDied","Data":"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74"} Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.509265 4661 scope.go:117] "RemoveContainer" containerID="2c97145622cba5b71615f2f68a3014deecd6a82fa33d9c2e2fd292d7f102cb5a" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.509556 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:48:21 crc kubenswrapper[4661]: E1001 05:48:21.509907 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.514913 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c73747-7ab3-4328-bec7-7708a39a50a2","Type":"ContainerDied","Data":"c80b010fd48f21b2a4e7c78754975049fbec743b8f1f668f8e8e0345f9c1aab2"} Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.514973 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.532449 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-c77567f6f-m5g68" podStartSLOduration=2.532435603 podStartE2EDuration="2.532435603s" podCreationTimestamp="2025-10-01 05:48:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:21.530073148 +0000 UTC m=+1150.468051762" watchObservedRunningTime="2025-10-01 05:48:21.532435603 +0000 UTC m=+1150.470414217" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.538892 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.583427 4661 scope.go:117] "RemoveContainer" containerID="6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.655813 4661 scope.go:117] "RemoveContainer" containerID="2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.669138 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.675200 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.697161 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.702471 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.703944 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.708272 4661 scope.go:117] "RemoveContainer" containerID="6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.708475 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.708590 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.767361 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8674487c84-nz4kb" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.163:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.163:8443: connect: connection refused" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.817186 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7c73747-7ab3-4328-bec7-7708a39a50a2" path="/var/lib/kubelet/pods/a7c73747-7ab3-4328-bec7-7708a39a50a2/volumes" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.819866 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e88bfd3d-d4f9-4804-9dde-d83ec857788d" path="/var/lib/kubelet/pods/e88bfd3d-d4f9-4804-9dde-d83ec857788d/volumes" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.893318 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.893390 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.893472 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.894304 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.894358 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-244lk\" (UniqueName: \"kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.894374 4661 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.894404 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995747 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995826 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995868 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995897 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995923 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995938 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-244lk\" (UniqueName: \"kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.995967 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.996280 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:21 crc kubenswrapper[4661]: I1001 05:48:21.996712 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.004226 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.005563 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.010055 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.013834 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.027782 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-244lk\" (UniqueName: \"kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk\") pod \"ceilometer-0\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.078057 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.219831 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54f94df7b6-mhnj2"] Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.540218 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.545057 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f94df7b6-mhnj2" event={"ID":"70d1ade0-7d6b-4c94-a376-ef7027a47a76","Type":"ContainerStarted","Data":"a204d06db4f580be638c5c5c9f6d4a37a30104cda72a0e8c954b1c01b553bee8"} Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.545091 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f94df7b6-mhnj2" event={"ID":"70d1ade0-7d6b-4c94-a376-ef7027a47a76","Type":"ContainerStarted","Data":"a82ae9f85e87c8a8003387ef2424c64e1851f7fff710f1e12c9c1be64b6a01ff"} Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.680584 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.681182 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.681233 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.681498 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:22 crc kubenswrapper[4661]: E1001 05:48:22.681511 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:22 crc kubenswrapper[4661]: I1001 05:48:22.682482 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.566678 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f94df7b6-mhnj2" event={"ID":"70d1ade0-7d6b-4c94-a376-ef7027a47a76","Type":"ContainerStarted","Data":"e33fc234f30fccf2c11cf4ad935729d52a4d88ab3b4e35b4e290ba4981889c5d"} Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.567252 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.571854 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerStarted","Data":"0fe0f667af588280d3d2de7164b9390a4b8b80773cc47739572dcd883a27727b"} Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.571908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerStarted","Data":"ec6839ed849bf8f5685890643d00c813a809c29303c260476439654a7b4ab1bf"} Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.571918 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerStarted","Data":"3e3d5189f1733c8b74eebf3bb414c6e672408d0851277fc20b1c99c8011ad162"} Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.572557 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:48:23 crc kubenswrapper[4661]: E1001 05:48:23.572786 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.597007 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54f94df7b6-mhnj2" podStartSLOduration=2.596988468 podStartE2EDuration="2.596988468s" podCreationTimestamp="2025-10-01 05:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:23.592157915 +0000 UTC m=+1152.530136529" watchObservedRunningTime="2025-10-01 05:48:23.596988468 +0000 UTC m=+1152.534967082" Oct 01 05:48:23 crc kubenswrapper[4661]: I1001 05:48:23.896474 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7677654df9-tdbxq" Oct 01 05:48:24 crc kubenswrapper[4661]: I1001 05:48:24.578985 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:24 crc kubenswrapper[4661]: I1001 05:48:24.579450 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:48:24 crc kubenswrapper[4661]: E1001 05:48:24.579734 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:25 crc kubenswrapper[4661]: I1001 05:48:25.961477 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.025437 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.026371 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="dnsmasq-dns" containerID="cri-o://25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559" gracePeriod=10 Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.335621 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.337198 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.339479 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-vqlpj" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.347238 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.351717 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.358952 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.399362 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.399467 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config-secret\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.399506 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.399537 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b5c5\" (UniqueName: \"kubernetes.io/projected/c10d9095-6d32-4b4a-8706-d06e0693ddb9-kube-api-access-2b5c5\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.502009 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config-secret\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.502089 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.502116 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b5c5\" (UniqueName: \"kubernetes.io/projected/c10d9095-6d32-4b4a-8706-d06e0693ddb9-kube-api-access-2b5c5\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.502223 4661 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.503188 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.510541 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.511117 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c10d9095-6d32-4b4a-8706-d06e0693ddb9-openstack-config-secret\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.529328 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b5c5\" (UniqueName: \"kubernetes.io/projected/c10d9095-6d32-4b4a-8706-d06e0693ddb9-kube-api-access-2b5c5\") pod \"openstackclient\" (UID: \"c10d9095-6d32-4b4a-8706-d06e0693ddb9\") " pod="openstack/openstackclient" Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.599162 4661 generic.go:334] "Generic (PLEG): container finished" podID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerID="25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559" exitCode=0 Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.599207 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerDied","Data":"25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559"} Oct 01 05:48:26 crc kubenswrapper[4661]: I1001 05:48:26.655000 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 01 05:48:27 crc kubenswrapper[4661]: I1001 05:48:27.181710 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:48:27 crc kubenswrapper[4661]: I1001 05:48:27.188412 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-558754b4b6-4khhg" Oct 01 05:48:27 crc kubenswrapper[4661]: I1001 05:48:27.486961 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:27 crc kubenswrapper[4661]: I1001 05:48:27.540095 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:27 crc kubenswrapper[4661]: I1001 05:48:27.829443 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: connect: connection refused" Oct 01 05:48:29 crc kubenswrapper[4661]: I1001 05:48:29.640790 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerStarted","Data":"9ed72668162ebd6cdec98eebb5bff7b2bf7b9a94a035314df6e1c47f5a2b2939"} Oct 01 05:48:29 crc kubenswrapper[4661]: I1001 05:48:29.905530 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 05:48:29 crc kubenswrapper[4661]: I1001 05:48:29.976006 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.082740 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.082780 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.082814 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.083020 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.083044 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 
05:48:30.083073 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd6f4\" (UniqueName: \"kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4\") pod \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\" (UID: \"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705\") " Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.090010 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4" (OuterVolumeSpecName: "kube-api-access-qd6f4") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "kube-api-access-qd6f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.154992 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.156473 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.160820 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.166617 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config" (OuterVolumeSpecName: "config") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.183381 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" (UID: "e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185835 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185859 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185869 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd6f4\" (UniqueName: \"kubernetes.io/projected/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-kube-api-access-qd6f4\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185880 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185889 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.185896 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.666581 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c10d9095-6d32-4b4a-8706-d06e0693ddb9","Type":"ContainerStarted","Data":"8e47be73ce08bd3d1ae74af72b6e1a915557994a3b56dd9b8218ed7eec1279bb"} Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.671595 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" event={"ID":"e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705","Type":"ContainerDied","Data":"4a2cb94d0b5e0ef9abf294551c5e31cbcbaeb536a6e2e98cd5df7abebc1c51f5"} Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.671657 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cf77b4997-gvt8p" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.671679 4661 scope.go:117] "RemoveContainer" containerID="25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.673205 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q2jck" event={"ID":"77022b65-9c8f-4173-957d-0d0e457bd838","Type":"ContainerStarted","Data":"65668dfb53b2e53a0efbe7ba9d0323e3b0f8dec532ea046c5bc9fb5a15cb1373"} Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.709392 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-q2jck" podStartSLOduration=12.565214361 podStartE2EDuration="59.709370671s" podCreationTimestamp="2025-10-01 05:47:31 +0000 UTC" firstStartedPulling="2025-10-01 05:47:42.569646822 +0000 UTC m=+1111.507625436" lastFinishedPulling="2025-10-01 05:48:29.713803132 +0000 UTC m=+1158.651781746" observedRunningTime="2025-10-01 05:48:30.696782396 +0000 UTC m=+1159.634761020" watchObservedRunningTime="2025-10-01 05:48:30.709370671 +0000 UTC m=+1159.647349285" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.721690 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.726991 4661 scope.go:117] "RemoveContainer" containerID="71e76f677846f83c6c32de3d9b8a14b14e370a071a06027ba1a2f98065f3c230" Oct 01 05:48:30 crc kubenswrapper[4661]: I1001 05:48:30.727581 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cf77b4997-gvt8p"] Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.700469 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerStarted","Data":"66da55c4f4b9de89ae71b0c6bf6bd0a73ec5c3234ef99e6a7ed651d75f9459de"} Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.700964 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.725164 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8674487c84-nz4kb" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.163:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.163:8443: connect: connection refused" Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.725271 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.726721 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.570042627 podStartE2EDuration="10.726710788s" podCreationTimestamp="2025-10-01 05:48:21 +0000 UTC" firstStartedPulling="2025-10-01 05:48:22.571940469 +0000 UTC m=+1151.509919083" lastFinishedPulling="2025-10-01 05:48:30.72860863 +0000 UTC m=+1159.666587244" observedRunningTime="2025-10-01 05:48:31.722826402 +0000 UTC m=+1160.660805016" watchObservedRunningTime="2025-10-01 05:48:31.726710788 +0000 UTC m=+1160.664689392" Oct 01 05:48:31 crc kubenswrapper[4661]: I1001 05:48:31.774432 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" 
path="/var/lib/kubelet/pods/e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705/volumes" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.369942 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-57dbcb9cbf-64x2k"] Oct 01 05:48:32 crc kubenswrapper[4661]: E1001 05:48:32.370298 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="dnsmasq-dns" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.370315 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="dnsmasq-dns" Oct 01 05:48:32 crc kubenswrapper[4661]: E1001 05:48:32.370347 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="init" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.370356 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="init" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.370597 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e83aed5c-b6cc-4568-ab8f-f2d0c4cb1705" containerName="dnsmasq-dns" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.373735 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.382396 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-57dbcb9cbf-64x2k"] Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.412457 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.412556 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.412752 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527046 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-internal-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527111 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-config-data\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527147 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnbtd\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-kube-api-access-gnbtd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527172 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-combined-ca-bundle\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527197 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-public-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527221 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-log-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527252 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-etc-swift\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.527274 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-run-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.628703 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-public-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.628761 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-log-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.628834 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-etc-swift\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.628855 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-run-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.628984 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-internal-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.629023 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-config-data\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.629073 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnbtd\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-kube-api-access-gnbtd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.629102 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-combined-ca-bundle\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.629663 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-run-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.629943 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-log-httpd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.635619 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-combined-ca-bundle\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.636114 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-public-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.637109 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-etc-swift\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.638481 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-config-data\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: 
\"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.641942 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-internal-tls-certs\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.648276 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnbtd\" (UniqueName: \"kubernetes.io/projected/cb384d97-a4b8-4eba-ac70-0ba6843cec4e-kube-api-access-gnbtd\") pod \"swift-proxy-57dbcb9cbf-64x2k\" (UID: \"cb384d97-a4b8-4eba-ac70-0ba6843cec4e\") " pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:32 crc kubenswrapper[4661]: I1001 05:48:32.723876 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.150048 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.168947 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.181410 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54f94df7b6-mhnj2" Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.252220 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.252466 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68c4554784-zv8tz" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api-log" containerID="cri-o://c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.253032 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68c4554784-zv8tz" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api" containerID="cri-o://3ec781b6fa67c1acefc5cff6ef4ea62b86f3a091fe3e332033e8427c084f388f" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.362769 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-57dbcb9cbf-64x2k"] Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.746914 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" event={"ID":"cb384d97-a4b8-4eba-ac70-0ba6843cec4e","Type":"ContainerStarted","Data":"c537d2083685fd97dc11ba3cc515685e373794ef823bf5afa0cb8d3e3e6cde3b"} Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.747263 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" event={"ID":"cb384d97-a4b8-4eba-ac70-0ba6843cec4e","Type":"ContainerStarted","Data":"5fbb136a342536a0fe7d9930370f34d0cb79f8d97ecf2661c50c89629c92c7ff"} Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.759779 4661 generic.go:334] "Generic (PLEG): container finished" podID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerID="c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5" exitCode=143 Oct 01 05:48:33 crc kubenswrapper[4661]: 
I1001 05:48:33.760052 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-central-agent" containerID="cri-o://ec6839ed849bf8f5685890643d00c813a809c29303c260476439654a7b4ab1bf" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.761318 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="proxy-httpd" containerID="cri-o://66da55c4f4b9de89ae71b0c6bf6bd0a73ec5c3234ef99e6a7ed651d75f9459de" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.761371 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="sg-core" containerID="cri-o://9ed72668162ebd6cdec98eebb5bff7b2bf7b9a94a035314df6e1c47f5a2b2939" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.761415 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-notification-agent" containerID="cri-o://0fe0f667af588280d3d2de7164b9390a4b8b80773cc47739572dcd883a27727b" gracePeriod=30 Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.784753 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerDied","Data":"c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5"} Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.949015 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-q5rzr"] Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.950938 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:33 crc kubenswrapper[4661]: I1001 05:48:33.962334 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-q5rzr"] Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.076789 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcctr\" (UniqueName: \"kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr\") pod \"nova-api-db-create-q5rzr\" (UID: \"9b4455f4-4266-4c94-a56f-5f06005485fa\") " pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.179250 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcctr\" (UniqueName: \"kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr\") pod \"nova-api-db-create-q5rzr\" (UID: \"9b4455f4-4266-4c94-a56f-5f06005485fa\") " pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.204191 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcctr\" (UniqueName: \"kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr\") pod \"nova-api-db-create-q5rzr\" (UID: \"9b4455f4-4266-4c94-a56f-5f06005485fa\") " pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.244852 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jmdzw"] Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.246021 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.264478 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jmdzw"] Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.291119 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.339712 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-qt9ck"] Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.350721 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.383695 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-qt9ck"] Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.393889 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jczsk\" (UniqueName: \"kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk\") pod \"nova-cell0-db-create-jmdzw\" (UID: \"60845954-9b29-47bd-9080-790730863053\") " pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.500881 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jczsk\" (UniqueName: \"kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk\") pod \"nova-cell0-db-create-jmdzw\" (UID: \"60845954-9b29-47bd-9080-790730863053\") " pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.502965 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl58z\" (UniqueName: \"kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z\") pod \"nova-cell1-db-create-qt9ck\" (UID: \"67a75668-b682-4fb4-a779-8ff578213667\") " pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.525393 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jczsk\" (UniqueName: \"kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk\") pod \"nova-cell0-db-create-jmdzw\" (UID: \"60845954-9b29-47bd-9080-790730863053\") " pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.562536 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.615863 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl58z\" (UniqueName: \"kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z\") pod \"nova-cell1-db-create-qt9ck\" (UID: \"67a75668-b682-4fb4-a779-8ff578213667\") " pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.646324 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl58z\" (UniqueName: \"kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z\") pod \"nova-cell1-db-create-qt9ck\" (UID: \"67a75668-b682-4fb4-a779-8ff578213667\") " pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.714406 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.757000 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:48:34 crc kubenswrapper[4661]: E1001 05:48:34.757217 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(87a9e404-beb1-4f1d-a7a2-188ccdacbb81)\"" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.843059 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" event={"ID":"cb384d97-a4b8-4eba-ac70-0ba6843cec4e","Type":"ContainerStarted","Data":"80ea3f983f0a498e25d51b9ecadac7706ed6913163a567a99b90aa9a886fe8b2"} Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.843130 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.843151 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.881469 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" podStartSLOduration=2.881448131 podStartE2EDuration="2.881448131s" podCreationTimestamp="2025-10-01 05:48:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:34.870240183 +0000 UTC m=+1163.808218797" watchObservedRunningTime="2025-10-01 05:48:34.881448131 +0000 UTC m=+1163.819426735" Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887415 4661 generic.go:334] "Generic (PLEG): container finished" podID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerID="66da55c4f4b9de89ae71b0c6bf6bd0a73ec5c3234ef99e6a7ed651d75f9459de" exitCode=0 Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887447 4661 generic.go:334] "Generic (PLEG): container finished" podID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerID="9ed72668162ebd6cdec98eebb5bff7b2bf7b9a94a035314df6e1c47f5a2b2939" exitCode=2 Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887455 4661 generic.go:334] "Generic (PLEG): container finished" podID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerID="ec6839ed849bf8f5685890643d00c813a809c29303c260476439654a7b4ab1bf" exitCode=0 Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887474 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerDied","Data":"66da55c4f4b9de89ae71b0c6bf6bd0a73ec5c3234ef99e6a7ed651d75f9459de"} Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887497 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerDied","Data":"9ed72668162ebd6cdec98eebb5bff7b2bf7b9a94a035314df6e1c47f5a2b2939"} Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.887505 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerDied","Data":"ec6839ed849bf8f5685890643d00c813a809c29303c260476439654a7b4ab1bf"} Oct 01 05:48:34 crc kubenswrapper[4661]: I1001 05:48:34.969830 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-q5rzr"] Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.380115 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jmdzw"] Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.539910 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-qt9ck"] Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.911911 4661 generic.go:334] "Generic (PLEG): container finished" podID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerID="0fe0f667af588280d3d2de7164b9390a4b8b80773cc47739572dcd883a27727b" exitCode=0 Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.911974 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerDied","Data":"0fe0f667af588280d3d2de7164b9390a4b8b80773cc47739572dcd883a27727b"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.921092 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qt9ck" event={"ID":"67a75668-b682-4fb4-a779-8ff578213667","Type":"ContainerStarted","Data":"dc4ba02196981f6b876eb48f7a045055748deac37605f1798398a6fe0ce5490f"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.928004 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jmdzw" event={"ID":"60845954-9b29-47bd-9080-790730863053","Type":"ContainerStarted","Data":"20354aefd144b91d69a8b8c9f5cd08da669768d8f90a0fccc9e0c6a4ed7958c5"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.928045 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jmdzw" event={"ID":"60845954-9b29-47bd-9080-790730863053","Type":"ContainerStarted","Data":"8ead07efa476b20e700e0e9902920ff63b19ea6a3b7fc53c0543c0eb960b3425"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.949934 4661 generic.go:334] "Generic (PLEG): container finished" podID="9b4455f4-4266-4c94-a56f-5f06005485fa" containerID="d04e7d70fbe3ab61b775efa625dbb36d86950d4d4c2ecdb20474c5276554728c" exitCode=0 Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.950360 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q5rzr" event={"ID":"9b4455f4-4266-4c94-a56f-5f06005485fa","Type":"ContainerDied","Data":"d04e7d70fbe3ab61b775efa625dbb36d86950d4d4c2ecdb20474c5276554728c"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.950412 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q5rzr" event={"ID":"9b4455f4-4266-4c94-a56f-5f06005485fa","Type":"ContainerStarted","Data":"ce37ad606ad0f69646e09cce90a19dffb151e5158a1be25825500602f24aae9d"} Oct 01 05:48:35 crc kubenswrapper[4661]: I1001 05:48:35.951382 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-jmdzw" podStartSLOduration=1.9513693330000002 podStartE2EDuration="1.951369333s" podCreationTimestamp="2025-10-01 05:48:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:35.950272063 +0000 UTC m=+1164.888250677" watchObservedRunningTime="2025-10-01 05:48:35.951369333 +0000 UTC 
m=+1164.889347957" Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.002000 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a9e404_beb1_4f1d_a7a2_188ccdacbb81.slice/crio-conmon-264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a9e404_beb1_4f1d_a7a2_188ccdacbb81.slice/crio-conmon-264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.002288 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2156e51_3bd3_4a46_8ab3_55d94518eced.slice/crio-conmon-c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2156e51_3bd3_4a46_8ab3_55d94518eced.slice/crio-conmon-c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.002334 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a9e404_beb1_4f1d_a7a2_188ccdacbb81.slice/crio-264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a9e404_beb1_4f1d_a7a2_188ccdacbb81.slice/crio-264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.002360 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2156e51_3bd3_4a46_8ab3_55d94518eced.slice/crio-c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2156e51_3bd3_4a46_8ab3_55d94518eced.slice/crio-c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.002556 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-c5c179b7082dbf1d544dc34f769214c25ef1c54a98d5e0f1e4da55fb948a2288": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-c5c179b7082dbf1d544dc34f769214c25ef1c54a98d5e0f1e4da55fb948a2288: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.016866 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-conmon-b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch 
/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-conmon-b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.016924 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97dd6a7e_6da4_4f86_bf48_4bb9166cc5fb.slice/crio-conmon-b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97dd6a7e_6da4_4f86_bf48_4bb9166cc5fb.slice/crio-conmon-b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.017071 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97dd6a7e_6da4_4f86_bf48_4bb9166cc5fb.slice/crio-b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97dd6a7e_6da4_4f86_bf48_4bb9166cc5fb.slice/crio-b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: W1001 05:48:36.017085 4661 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice/crio-b298480742be49d80d05099a4a6f18e1c9e4ed2669bebb21c90a78a8ce55eb8e.scope: no such file or directory Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.307532 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.391588 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68c4554784-zv8tz" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": read tcp 10.217.0.2:42166->10.217.0.177:9311: read: connection reset by peer" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.391756 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68c4554784-zv8tz" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": read tcp 10.217.0.2:42158->10.217.0.177:9311: read: connection reset by peer" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469179 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469247 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469321 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469363 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469416 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469441 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.469486 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-244lk\" (UniqueName: \"kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk\") pod \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\" (UID: \"b9ab2eb1-516b-4361-ba88-e6ea315a178d\") " Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.474967 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk" (OuterVolumeSpecName: 
"kube-api-access-244lk") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "kube-api-access-244lk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.477031 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.486039 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.490716 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts" (OuterVolumeSpecName: "scripts") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.513584 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.572405 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.572430 4661 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.572439 4661 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ab2eb1-516b-4361-ba88-e6ea315a178d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.572448 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-244lk\" (UniqueName: \"kubernetes.io/projected/b9ab2eb1-516b-4361-ba88-e6ea315a178d-kube-api-access-244lk\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.572456 4661 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.599671 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.657832 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data" (OuterVolumeSpecName: "config-data") pod "b9ab2eb1-516b-4361-ba88-e6ea315a178d" (UID: "b9ab2eb1-516b-4361-ba88-e6ea315a178d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.673752 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.673782 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ab2eb1-516b-4361-ba88-e6ea315a178d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:36 crc kubenswrapper[4661]: E1001 05:48:36.772013 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice/crio-6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60bcbef6_72b1_41e7_9871_ad2945197629.slice/crio-a968d9559ab95035b58d87618b755e39f127cf4619d342515817c08e242950a1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fdca12_5e6d_43d7_ae59_33b3a388ada4.slice/crio-7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60bcbef6_72b1_41e7_9871_ad2945197629.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode83aed5c_b6cc_4568_ab8f_f2d0c4cb1705.slice/crio-25f568acfce87b192068418e4b8f2db5ffe4fb859f21397977b5a78a5bb6f559.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod337f7c79_e3bf_49ef_b783_9ac03df52fac.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice/crio-conmon-6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod337f7c79_e3bf_49ef_b783_9ac03df52fac.slice/crio-965728f85c5dc8ebd0fa1b91dad06c83bdf64c4eae759009f29413a2a663e5d2\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88bfd3d_d4f9_4804_9dde_d83ec857788d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice/crio-6893c5a00939dfa0dd0cc74b7dcb4f3cdf52457cc6c8913411782ef8fa00b680.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode83aed5c_b6cc_4568_ab8f_f2d0c4cb1705.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice/crio-conmon-6721008a6e7fb7232e1bed5b29bef03ac23f9fbbb05a6184c65918c9a226b259.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c73747_7ab3_4328_bec7_7708a39a50a2.slice/crio-2a7c1118ffd4120e1f35b10e93fd86f149a026bef063b4be0b11108c7754aa68.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fa3d349_9844_4d00_ac96_5c59f46badfa.slice/crio-conmon-75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2.scope\": RecentStats: unable to find data in memory cache]" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.974860 4661 generic.go:334] "Generic (PLEG): container finished" podID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerID="75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2" exitCode=137 Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.974924 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerDied","Data":"75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2"} Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.974948 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8674487c84-nz4kb" event={"ID":"5fa3d349-9844-4d00-ac96-5c59f46badfa","Type":"ContainerDied","Data":"5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54"} Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.974959 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5446aae209cd07b6cd0244dd4d3774b4a851478ad5ab1be2b23ad5d63d079b54" Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.979232 4661 generic.go:334] "Generic (PLEG): container finished" podID="67a75668-b682-4fb4-a779-8ff578213667" containerID="88b6d9d89366ef096b51bc9bed89254ab2357e9dd7d7ca98ae93f11cbc11d9b9" exitCode=0 Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.979265 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qt9ck" event={"ID":"67a75668-b682-4fb4-a779-8ff578213667","Type":"ContainerDied","Data":"88b6d9d89366ef096b51bc9bed89254ab2357e9dd7d7ca98ae93f11cbc11d9b9"} Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.988850 4661 generic.go:334] "Generic (PLEG): container finished" podID="60845954-9b29-47bd-9080-790730863053" containerID="20354aefd144b91d69a8b8c9f5cd08da669768d8f90a0fccc9e0c6a4ed7958c5" exitCode=0 Oct 01 05:48:36 crc kubenswrapper[4661]: I1001 05:48:36.988904 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jmdzw" event={"ID":"60845954-9b29-47bd-9080-790730863053","Type":"ContainerDied","Data":"20354aefd144b91d69a8b8c9f5cd08da669768d8f90a0fccc9e0c6a4ed7958c5"} Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.001801 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ab2eb1-516b-4361-ba88-e6ea315a178d","Type":"ContainerDied","Data":"3e3d5189f1733c8b74eebf3bb414c6e672408d0851277fc20b1c99c8011ad162"} Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.002033 4661 scope.go:117] "RemoveContainer" containerID="66da55c4f4b9de89ae71b0c6bf6bd0a73ec5c3234ef99e6a7ed651d75f9459de" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.002193 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.010314 4661 generic.go:334] "Generic (PLEG): container finished" podID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerID="3ec781b6fa67c1acefc5cff6ef4ea62b86f3a091fe3e332033e8427c084f388f" exitCode=0 Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.010486 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerDied","Data":"3ec781b6fa67c1acefc5cff6ef4ea62b86f3a091fe3e332033e8427c084f388f"} Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.116717 4661 scope.go:117] "RemoveContainer" containerID="9ed72668162ebd6cdec98eebb5bff7b2bf7b9a94a035314df6e1c47f5a2b2939" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.128462 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.134825 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.148844 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.149930 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.176261 4661 scope.go:117] "RemoveContainer" containerID="0fe0f667af588280d3d2de7164b9390a4b8b80773cc47739572dcd883a27727b" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.211760 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212206 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-notification-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212224 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-notification-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212242 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api-log" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212249 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api-log" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212257 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212263 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212278 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-central-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212284 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-central-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212293 4661 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="proxy-httpd" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212299 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="proxy-httpd" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212315 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212321 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212346 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="sg-core" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212353 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="sg-core" Oct 01 05:48:37 crc kubenswrapper[4661]: E1001 05:48:37.212369 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon-log" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212377 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon-log" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212558 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212572 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-central-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212582 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="proxy-httpd" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212594 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="sg-core" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212609 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon-log" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212620 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" containerName="barbican-api-log" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212645 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" containerName="ceilometer-notification-agent" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.212655 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" containerName="horizon" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.214362 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.225170 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.225333 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.233078 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.233506 4661 scope.go:117] "RemoveContainer" containerID="ec6839ed849bf8f5685890643d00c813a809c29303c260476439654a7b4ab1bf" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286429 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286463 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data\") pod \"a2156e51-3bd3-4a46-8ab3-55d94518eced\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286518 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286609 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286645 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs\") pod \"a2156e51-3bd3-4a46-8ab3-55d94518eced\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286672 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286698 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom\") pod \"a2156e51-3bd3-4a46-8ab3-55d94518eced\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286744 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-combined-ca-bundle\") pod \"a2156e51-3bd3-4a46-8ab3-55d94518eced\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286775 
4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bln8d\" (UniqueName: \"kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286824 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9bvr\" (UniqueName: \"kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr\") pod \"a2156e51-3bd3-4a46-8ab3-55d94518eced\" (UID: \"a2156e51-3bd3-4a46-8ab3-55d94518eced\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286874 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.286902 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key\") pod \"5fa3d349-9844-4d00-ac96-5c59f46badfa\" (UID: \"5fa3d349-9844-4d00-ac96-5c59f46badfa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287197 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287293 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287334 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287350 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287363 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.287405 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kmd2\" (UniqueName: \"kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.289230 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs" (OuterVolumeSpecName: "logs") pod "a2156e51-3bd3-4a46-8ab3-55d94518eced" (UID: "a2156e51-3bd3-4a46-8ab3-55d94518eced"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.290347 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs" (OuterVolumeSpecName: "logs") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.296544 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a2156e51-3bd3-4a46-8ab3-55d94518eced" (UID: "a2156e51-3bd3-4a46-8ab3-55d94518eced"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.346061 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.346067 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr" (OuterVolumeSpecName: "kube-api-access-v9bvr") pod "a2156e51-3bd3-4a46-8ab3-55d94518eced" (UID: "a2156e51-3bd3-4a46-8ab3-55d94518eced"). InnerVolumeSpecName "kube-api-access-v9bvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.356979 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d" (OuterVolumeSpecName: "kube-api-access-bln8d") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "kube-api-access-bln8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.371846 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts" (OuterVolumeSpecName: "scripts") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.389848 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2156e51-3bd3-4a46-8ab3-55d94518eced" (UID: "a2156e51-3bd3-4a46-8ab3-55d94518eced"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390250 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390439 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390531 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390549 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390563 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390658 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kmd2\" (UniqueName: \"kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390836 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2156e51-3bd3-4a46-8ab3-55d94518eced-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390851 4661 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390872 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bln8d\" (UniqueName: \"kubernetes.io/projected/5fa3d349-9844-4d00-ac96-5c59f46badfa-kube-api-access-bln8d\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390881 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9bvr\" (UniqueName: \"kubernetes.io/projected/a2156e51-3bd3-4a46-8ab3-55d94518eced-kube-api-access-v9bvr\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390889 4661 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390897 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.390905 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fa3d349-9844-4d00-ac96-5c59f46badfa-logs\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.392187 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.392539 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.396106 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.396741 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.397261 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.398737 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.399973 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.404933 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data" (OuterVolumeSpecName: "config-data") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.417319 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kmd2\" (UniqueName: \"kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2\") pod \"ceilometer-0\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " pod="openstack/ceilometer-0"
Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.434766 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "5fa3d349-9844-4d00-ac96-5c59f46badfa" (UID: "5fa3d349-9844-4d00-ac96-5c59f46badfa"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.460737 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data" (OuterVolumeSpecName: "config-data") pod "a2156e51-3bd3-4a46-8ab3-55d94518eced" (UID: "a2156e51-3bd3-4a46-8ab3-55d94518eced"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.493201 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.493238 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5fa3d349-9844-4d00-ac96-5c59f46badfa-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.493247 4661 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fa3d349-9844-4d00-ac96-5c59f46badfa-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.493256 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2156e51-3bd3-4a46-8ab3-55d94518eced-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.588060 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.663766 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.769583 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9ab2eb1-516b-4361-ba88-e6ea315a178d" path="/var/lib/kubelet/pods/b9ab2eb1-516b-4361-ba88-e6ea315a178d/volumes" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.800495 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcctr\" (UniqueName: \"kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr\") pod \"9b4455f4-4266-4c94-a56f-5f06005485fa\" (UID: \"9b4455f4-4266-4c94-a56f-5f06005485fa\") " Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.803833 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr" (OuterVolumeSpecName: "kube-api-access-rcctr") pod "9b4455f4-4266-4c94-a56f-5f06005485fa" (UID: "9b4455f4-4266-4c94-a56f-5f06005485fa"). InnerVolumeSpecName "kube-api-access-rcctr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:37 crc kubenswrapper[4661]: I1001 05:48:37.903247 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcctr\" (UniqueName: \"kubernetes.io/projected/9b4455f4-4266-4c94-a56f-5f06005485fa-kube-api-access-rcctr\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.022588 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c4554784-zv8tz" event={"ID":"a2156e51-3bd3-4a46-8ab3-55d94518eced","Type":"ContainerDied","Data":"aa259baa7383c3508ec8862fe9654b62e5f9f12603abba412979cafb6e1b08f5"} Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.022655 4661 scope.go:117] "RemoveContainer" containerID="3ec781b6fa67c1acefc5cff6ef4ea62b86f3a091fe3e332033e8427c084f388f" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.022662 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c4554784-zv8tz" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.027218 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q5rzr" event={"ID":"9b4455f4-4266-4c94-a56f-5f06005485fa","Type":"ContainerDied","Data":"ce37ad606ad0f69646e09cce90a19dffb151e5158a1be25825500602f24aae9d"} Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.027251 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce37ad606ad0f69646e09cce90a19dffb151e5158a1be25825500602f24aae9d" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.027327 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q5rzr" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.038905 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8674487c84-nz4kb" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.050488 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.062810 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-68c4554784-zv8tz"] Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.063912 4661 scope.go:117] "RemoveContainer" containerID="c1745ee10c2194bcfcbca61efd97d4444aaed22411c0d5f6548df366b1300ba5" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.089186 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.098139 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8674487c84-nz4kb"] Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.188038 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.633384 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.638850 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.724342 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl58z\" (UniqueName: \"kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z\") pod \"67a75668-b682-4fb4-a779-8ff578213667\" (UID: \"67a75668-b682-4fb4-a779-8ff578213667\") " Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.724519 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jczsk\" (UniqueName: \"kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk\") pod \"60845954-9b29-47bd-9080-790730863053\" (UID: \"60845954-9b29-47bd-9080-790730863053\") " Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.737492 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk" (OuterVolumeSpecName: "kube-api-access-jczsk") pod "60845954-9b29-47bd-9080-790730863053" (UID: "60845954-9b29-47bd-9080-790730863053"). InnerVolumeSpecName "kube-api-access-jczsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.737600 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z" (OuterVolumeSpecName: "kube-api-access-vl58z") pod "67a75668-b682-4fb4-a779-8ff578213667" (UID: "67a75668-b682-4fb4-a779-8ff578213667"). InnerVolumeSpecName "kube-api-access-vl58z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.826796 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jczsk\" (UniqueName: \"kubernetes.io/projected/60845954-9b29-47bd-9080-790730863053-kube-api-access-jczsk\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:38 crc kubenswrapper[4661]: I1001 05:48:38.826843 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl58z\" (UniqueName: \"kubernetes.io/projected/67a75668-b682-4fb4-a779-8ff578213667-kube-api-access-vl58z\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.058852 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerStarted","Data":"0bfb35e257792c466c1e295e92f7a10a2b68a9df84fc4411291b08723c40f014"} Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.059338 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerStarted","Data":"e716c2f34481a0fb315b5fa7b2ef92c38826914714c8aa5be8c6a057871d9bb3"} Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.068612 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qt9ck" event={"ID":"67a75668-b682-4fb4-a779-8ff578213667","Type":"ContainerDied","Data":"dc4ba02196981f6b876eb48f7a045055748deac37605f1798398a6fe0ce5490f"} Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.068682 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc4ba02196981f6b876eb48f7a045055748deac37605f1798398a6fe0ce5490f" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.068738 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-qt9ck" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.070460 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jmdzw" event={"ID":"60845954-9b29-47bd-9080-790730863053","Type":"ContainerDied","Data":"8ead07efa476b20e700e0e9902920ff63b19ea6a3b7fc53c0543c0eb960b3425"} Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.070492 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ead07efa476b20e700e0e9902920ff63b19ea6a3b7fc53c0543c0eb960b3425" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.070549 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jmdzw" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.769732 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fa3d349-9844-4d00-ac96-5c59f46badfa" path="/var/lib/kubelet/pods/5fa3d349-9844-4d00-ac96-5c59f46badfa/volumes" Oct 01 05:48:39 crc kubenswrapper[4661]: I1001 05:48:39.770744 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2156e51-3bd3-4a46-8ab3-55d94518eced" path="/var/lib/kubelet/pods/a2156e51-3bd3-4a46-8ab3-55d94518eced/volumes" Oct 01 05:48:40 crc kubenswrapper[4661]: I1001 05:48:40.080280 4661 generic.go:334] "Generic (PLEG): container finished" podID="77022b65-9c8f-4173-957d-0d0e457bd838" containerID="65668dfb53b2e53a0efbe7ba9d0323e3b0f8dec532ea046c5bc9fb5a15cb1373" exitCode=0 Oct 01 05:48:40 crc kubenswrapper[4661]: I1001 05:48:40.080349 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q2jck" event={"ID":"77022b65-9c8f-4173-957d-0d0e457bd838","Type":"ContainerDied","Data":"65668dfb53b2e53a0efbe7ba9d0323e3b0f8dec532ea046c5bc9fb5a15cb1373"} Oct 01 05:48:40 crc kubenswrapper[4661]: I1001 05:48:40.084023 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerStarted","Data":"dfc3c198ca949bf63825c762f4853db619eca77644921c6a42f19fabad290419"} Oct 01 05:48:40 crc kubenswrapper[4661]: I1001 05:48:40.084054 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerStarted","Data":"3bc71c128935810138bfd2a17cd2b83202647bf4cd29a94298f45ee032f425b0"} Oct 01 05:48:42 crc kubenswrapper[4661]: I1001 05:48:42.730272 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:42 crc kubenswrapper[4661]: I1001 05:48:42.745724 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-57dbcb9cbf-64x2k" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.292964 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-e3fc-account-create-nj9hp"] Oct 01 05:48:44 crc kubenswrapper[4661]: E1001 05:48:44.293364 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60845954-9b29-47bd-9080-790730863053" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293377 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="60845954-9b29-47bd-9080-790730863053" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: E1001 05:48:44.293394 4661 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="67a75668-b682-4fb4-a779-8ff578213667" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293400 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="67a75668-b682-4fb4-a779-8ff578213667" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: E1001 05:48:44.293410 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b4455f4-4266-4c94-a56f-5f06005485fa" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293416 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b4455f4-4266-4c94-a56f-5f06005485fa" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293595 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="60845954-9b29-47bd-9080-790730863053" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293606 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="67a75668-b682-4fb4-a779-8ff578213667" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.293624 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b4455f4-4266-4c94-a56f-5f06005485fa" containerName="mariadb-database-create" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.300557 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e3fc-account-create-nj9hp" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.303257 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.306977 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e3fc-account-create-nj9hp"] Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.466236 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qdb8\" (UniqueName: \"kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8\") pod \"nova-api-e3fc-account-create-nj9hp\" (UID: \"2a138f3b-8b69-490d-9e44-67d9b56247f2\") " pod="openstack/nova-api-e3fc-account-create-nj9hp" Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.505841 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1ba1-account-create-w5rqs"] Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.510987 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.517982 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.539362 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1ba1-account-create-w5rqs"]
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.569060 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qdb8\" (UniqueName: \"kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8\") pod \"nova-api-e3fc-account-create-nj9hp\" (UID: \"2a138f3b-8b69-490d-9e44-67d9b56247f2\") " pod="openstack/nova-api-e3fc-account-create-nj9hp"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.588615 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qdb8\" (UniqueName: \"kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8\") pod \"nova-api-e3fc-account-create-nj9hp\" (UID: \"2a138f3b-8b69-490d-9e44-67d9b56247f2\") " pod="openstack/nova-api-e3fc-account-create-nj9hp"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.623882 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e3fc-account-create-nj9hp"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.671051 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5967\" (UniqueName: \"kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967\") pod \"nova-cell0-1ba1-account-create-w5rqs\" (UID: \"22866a21-7ff0-46cd-8bbe-5a133c012e47\") " pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.690562 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4f36-account-create-b6p98"]
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.692680 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.700614 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4f36-account-create-b6p98"]
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.704560 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.776009 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5967\" (UniqueName: \"kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967\") pod \"nova-cell0-1ba1-account-create-w5rqs\" (UID: \"22866a21-7ff0-46cd-8bbe-5a133c012e47\") " pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.808685 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5967\" (UniqueName: \"kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967\") pod \"nova-cell0-1ba1-account-create-w5rqs\" (UID: \"22866a21-7ff0-46cd-8bbe-5a133c012e47\") " pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.832887 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.877741 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq66c\" (UniqueName: \"kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c\") pod \"nova-cell1-4f36-account-create-b6p98\" (UID: \"c0168d5b-fbba-4e5d-9efe-556278ffd191\") " pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:44 crc kubenswrapper[4661]: I1001 05:48:44.980326 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq66c\" (UniqueName: \"kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c\") pod \"nova-cell1-4f36-account-create-b6p98\" (UID: \"c0168d5b-fbba-4e5d-9efe-556278ffd191\") " pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:45 crc kubenswrapper[4661]: I1001 05:48:45.000097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq66c\" (UniqueName: \"kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c\") pod \"nova-cell1-4f36-account-create-b6p98\" (UID: \"c0168d5b-fbba-4e5d-9efe-556278ffd191\") " pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:45 crc kubenswrapper[4661]: I1001 05:48:45.060079 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.107348 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5cdff47b98-krjm2"
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.667308 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q2jck"
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719246 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719314 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719370 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719421 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkd2h\" (UniqueName: \"kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719462 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.719500 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data\") pod \"77022b65-9c8f-4173-957d-0d0e457bd838\" (UID: \"77022b65-9c8f-4173-957d-0d0e457bd838\") "
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.721166 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.725800 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.726415 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h" (OuterVolumeSpecName: "kube-api-access-dkd2h") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "kube-api-access-dkd2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.726964 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts" (OuterVolumeSpecName: "scripts") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.768266 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.809432 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data" (OuterVolumeSpecName: "config-data") pod "77022b65-9c8f-4173-957d-0d0e457bd838" (UID: "77022b65-9c8f-4173-957d-0d0e457bd838"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.827803 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.828139 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.828157 4661 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77022b65-9c8f-4173-957d-0d0e457bd838-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.828167 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkd2h\" (UniqueName: \"kubernetes.io/projected/77022b65-9c8f-4173-957d-0d0e457bd838-kube-api-access-dkd2h\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.828177 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:46 crc kubenswrapper[4661]: I1001 05:48:46.828186 4661 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/77022b65-9c8f-4173-957d-0d0e457bd838-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.030904 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1ba1-account-create-w5rqs"]
Oct 01 05:48:47 crc kubenswrapper[4661]: E1001 05:48:47.041337 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fdca12_5e6d_43d7_ae59_33b3a388ada4.slice/crio-7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9\": RecentStats: unable to find data in memory cache]"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.100381 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4f36-account-create-b6p98"]
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.107477 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e3fc-account-create-nj9hp"]
Oct 01 05:48:47 crc kubenswrapper[4661]: W1001 05:48:47.116873 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0168d5b_fbba_4e5d_9efe_556278ffd191.slice/crio-af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807 WatchSource:0}: Error finding container af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807: Status 404 returned error can't find the container with id af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.169669 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c10d9095-6d32-4b4a-8706-d06e0693ddb9","Type":"ContainerStarted","Data":"a45bea183838dacd02e4be66ee5e87f9a5db578898fb7dea031a06b0056ab15b"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.171227 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e3fc-account-create-nj9hp" event={"ID":"2a138f3b-8b69-490d-9e44-67d9b56247f2","Type":"ContainerStarted","Data":"29e1af6519997690332b06733fbdd8b3e905ae7747cde6c3246adb5584a1d544"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.174419 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerStarted","Data":"8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.174488 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.175419 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4f36-account-create-b6p98" event={"ID":"c0168d5b-fbba-4e5d-9efe-556278ffd191","Type":"ContainerStarted","Data":"af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.176149 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba1-account-create-w5rqs" event={"ID":"22866a21-7ff0-46cd-8bbe-5a133c012e47","Type":"ContainerStarted","Data":"bdf69b51fe38c7fa141d61b8916e7c929ba5fd9518265a35033dc70df9fd7308"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.178273 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q2jck" event={"ID":"77022b65-9c8f-4173-957d-0d0e457bd838","Type":"ContainerDied","Data":"fabab7375587685cf1fb32d0a719e123d9d0c5bf04e3986e7a216c667643c602"}
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.178303 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fabab7375587685cf1fb32d0a719e123d9d0c5bf04e3986e7a216c667643c602"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.178368 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q2jck"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.187423 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=4.542351563 podStartE2EDuration="21.187407956s" podCreationTimestamp="2025-10-01 05:48:26 +0000 UTC" firstStartedPulling="2025-10-01 05:48:29.927287677 +0000 UTC m=+1158.865266291" lastFinishedPulling="2025-10-01 05:48:46.57234407 +0000 UTC m=+1175.510322684" observedRunningTime="2025-10-01 05:48:47.18280182 +0000 UTC m=+1176.120780454" watchObservedRunningTime="2025-10-01 05:48:47.187407956 +0000 UTC m=+1176.125386570"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.211572 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.857397964 podStartE2EDuration="10.211545219s" podCreationTimestamp="2025-10-01 05:48:37 +0000 UTC" firstStartedPulling="2025-10-01 05:48:38.215696176 +0000 UTC m=+1167.153674790" lastFinishedPulling="2025-10-01 05:48:46.569843431 +0000 UTC m=+1175.507822045" observedRunningTime="2025-10-01 05:48:47.204194487 +0000 UTC m=+1176.142173101" watchObservedRunningTime="2025-10-01 05:48:47.211545219 +0000 UTC m=+1176.149523833"
Oct 01 05:48:47 crc kubenswrapper[4661]: I1001 05:48:47.757263 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.030761 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 01 05:48:48 crc kubenswrapper[4661]: E1001 05:48:48.031154 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" containerName="cinder-db-sync"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.031169 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" containerName="cinder-db-sync"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.031395 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" containerName="cinder-db-sync"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.032334 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.038970 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.039191 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.039299 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.039784 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-ht7jd"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.043127 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075781 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075840 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075900 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075924 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075944 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkqpq\" (UniqueName: \"kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.075986 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.118533 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"]
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.120321 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.137466 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"]
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186071 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186480 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186571 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186604 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186624 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkqpq\" (UniqueName: \"kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186668 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186696 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186734 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186763 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186783 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gz9l\" (UniqueName: \"kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186816 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.186838 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.187583 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.192884 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.202105 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.209119 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.210003 4661 generic.go:334] "Generic (PLEG): container finished" podID="22866a21-7ff0-46cd-8bbe-5a133c012e47" containerID="6352b58cc9e764d94a8c56172765b178676fe88c08c422aa7051b4c057710b88" exitCode=0
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.210179 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba1-account-create-w5rqs" event={"ID":"22866a21-7ff0-46cd-8bbe-5a133c012e47","Type":"ContainerDied","Data":"6352b58cc9e764d94a8c56172765b178676fe88c08c422aa7051b4c057710b88"}
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.212333 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkqpq\" (UniqueName: \"kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.212857 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.218188 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.219186 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.220711 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.228665 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerStarted","Data":"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61"}
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.230781 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.237566 4661 generic.go:334] "Generic (PLEG): container finished" podID="2a138f3b-8b69-490d-9e44-67d9b56247f2" containerID="ab543598b5b7e4b7eaf1bf667901b89b5d375560db221ec6870faa8731e59ed8" exitCode=0
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.237649 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e3fc-account-create-nj9hp" event={"ID":"2a138f3b-8b69-490d-9e44-67d9b56247f2","Type":"ContainerDied","Data":"ab543598b5b7e4b7eaf1bf667901b89b5d375560db221ec6870faa8731e59ed8"}
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.241365 4661 generic.go:334] "Generic (PLEG): container finished" podID="c0168d5b-fbba-4e5d-9efe-556278ffd191" containerID="ac21ee7474abda5faf8aa48e1085b7e8c890fd7166e5567fa31333ed4523fe5e" exitCode=0
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.242295 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4f36-account-create-b6p98" event={"ID":"c0168d5b-fbba-4e5d-9efe-556278ffd191","Type":"ContainerDied","Data":"ac21ee7474abda5faf8aa48e1085b7e8c890fd7166e5567fa31333ed4523fe5e"}
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.291909 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gz9l\" (UniqueName: \"kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.292766 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.292809 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.292842 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.292953 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293021 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293060 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293082 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb8wd\" (UniqueName: \"kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293121 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293251 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293283 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293347 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.293368 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.294917 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.295401 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.297993 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.298810 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.298965 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.329311 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gz9l\" (UniqueName: \"kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l\") pod \"dnsmasq-dns-75958fc765-qfvf9\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.387763 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394552 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394658 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394700 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394748 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394776 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394805 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.394824 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb8wd\" (UniqueName: \"kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.395656 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.395696 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.400410 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.403502 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.405375 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.406039 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.417148 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb8wd\" (UniqueName: \"kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd\") pod \"cinder-api-0\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.470055 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.658689 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 01 05:48:48 crc kubenswrapper[4661]: W1001 05:48:48.965102 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc221754f_65eb_4272_b438_d2d8591c3645.slice/crio-586b8fb1c3e5a8cf38a7020216e9ed271340bc7b6680dc771e6efcaa6a46c189 WatchSource:0}: Error finding container 586b8fb1c3e5a8cf38a7020216e9ed271340bc7b6680dc771e6efcaa6a46c189: Status 404 returned error can't find the container with id 586b8fb1c3e5a8cf38a7020216e9ed271340bc7b6680dc771e6efcaa6a46c189
Oct 01 05:48:48 crc kubenswrapper[4661]: I1001 05:48:48.965846 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"]
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:48.996259 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.240356 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.273498 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerStarted","Data":"1efd1538ecb45e0543f9800d5d97592018eb1699c5cb24ba2c5ef1aa4c511481"}
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.273547 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerStarted","Data":"586b8fb1c3e5a8cf38a7020216e9ed271340bc7b6680dc771e6efcaa6a46c189"}
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.279897 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerStarted","Data":"b69be7efa0171fa77e93f7e86e5b77ea675d71c856e86edc8f074ae6762e764d"}
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.866749 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-c77567f6f-m5g68"
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.938069 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"]
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.938279 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cdff47b98-krjm2" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-api" containerID="cri-o://0d310a0ac536ef88c6129c7c3dec41ff17d6c4732227fd4a212643d42a37eec7" gracePeriod=30
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.939458 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cdff47b98-krjm2" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-httpd" containerID="cri-o://9b77deecfa6dc37544873cfdd0e2391501b58c93c3b1e18a76ce1248f74e3679" gracePeriod=30
Oct 01 05:48:49 crc kubenswrapper[4661]: I1001 05:48:49.988275 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.148585 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5967\" (UniqueName: \"kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967\") pod \"22866a21-7ff0-46cd-8bbe-5a133c012e47\" (UID: \"22866a21-7ff0-46cd-8bbe-5a133c012e47\") "
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.163897 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967" (OuterVolumeSpecName: "kube-api-access-k5967") pod "22866a21-7ff0-46cd-8bbe-5a133c012e47" (UID: "22866a21-7ff0-46cd-8bbe-5a133c012e47"). InnerVolumeSpecName "kube-api-access-k5967". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.262074 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5967\" (UniqueName: \"kubernetes.io/projected/22866a21-7ff0-46cd-8bbe-5a133c012e47-kube-api-access-k5967\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.273876 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.286383 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e3fc-account-create-nj9hp"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.343776 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerStarted","Data":"d39c4b86922f2aa2018c804beb065f2dc658c372b5841eff6a2254127fa08433"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.351104 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e3fc-account-create-nj9hp" event={"ID":"2a138f3b-8b69-490d-9e44-67d9b56247f2","Type":"ContainerDied","Data":"29e1af6519997690332b06733fbdd8b3e905ae7747cde6c3246adb5584a1d544"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.351135 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29e1af6519997690332b06733fbdd8b3e905ae7747cde6c3246adb5584a1d544"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.351213 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e3fc-account-create-nj9hp"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.362559 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qdb8\" (UniqueName: \"kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8\") pod \"2a138f3b-8b69-490d-9e44-67d9b56247f2\" (UID: \"2a138f3b-8b69-490d-9e44-67d9b56247f2\") "
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.362652 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq66c\" (UniqueName: \"kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c\") pod \"c0168d5b-fbba-4e5d-9efe-556278ffd191\" (UID: \"c0168d5b-fbba-4e5d-9efe-556278ffd191\") "
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.368697 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c" (OuterVolumeSpecName: "kube-api-access-dq66c") pod "c0168d5b-fbba-4e5d-9efe-556278ffd191" (UID: "c0168d5b-fbba-4e5d-9efe-556278ffd191"). InnerVolumeSpecName "kube-api-access-dq66c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.377179 4661 generic.go:334] "Generic (PLEG): container finished" podID="c221754f-65eb-4272-b438-d2d8591c3645" containerID="1efd1538ecb45e0543f9800d5d97592018eb1699c5cb24ba2c5ef1aa4c511481" exitCode=0
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.377381 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerDied","Data":"1efd1538ecb45e0543f9800d5d97592018eb1699c5cb24ba2c5ef1aa4c511481"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.377407 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerStarted","Data":"6408279bd738f019edcdba2778cb9ff3401c2c27993c82fa50c6f8b1e4f860d5"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.383595 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8" (OuterVolumeSpecName: "kube-api-access-9qdb8") pod "2a138f3b-8b69-490d-9e44-67d9b56247f2" (UID: "2a138f3b-8b69-490d-9e44-67d9b56247f2"). InnerVolumeSpecName "kube-api-access-9qdb8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.389323 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4f36-account-create-b6p98" event={"ID":"c0168d5b-fbba-4e5d-9efe-556278ffd191","Type":"ContainerDied","Data":"af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.389351 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af85527c52a2562e81f4d0e367201f4fd3f39b0f302aa387dda25df319fd2807"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.389393 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4f36-account-create-b6p98"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.396654 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba1-account-create-w5rqs" event={"ID":"22866a21-7ff0-46cd-8bbe-5a133c012e47","Type":"ContainerDied","Data":"bdf69b51fe38c7fa141d61b8916e7c929ba5fd9518265a35033dc70df9fd7308"}
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.396683 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1ba1-account-create-w5rqs"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.396691 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdf69b51fe38c7fa141d61b8916e7c929ba5fd9518265a35033dc70df9fd7308"
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.467462 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qdb8\" (UniqueName: \"kubernetes.io/projected/2a138f3b-8b69-490d-9e44-67d9b56247f2-kube-api-access-9qdb8\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.467491 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq66c\" (UniqueName: \"kubernetes.io/projected/c0168d5b-fbba-4e5d-9efe-556278ffd191-kube-api-access-dq66c\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.761599 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.943184 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.943690 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-log" containerID="cri-o://088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456" gracePeriod=30
Oct 01 05:48:50 crc kubenswrapper[4661]: I1001 05:48:50.943843 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-httpd" containerID="cri-o://90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898" gracePeriod=30
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.413154 4661 generic.go:334] "Generic (PLEG): container finished" podID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerID="9b77deecfa6dc37544873cfdd0e2391501b58c93c3b1e18a76ce1248f74e3679" exitCode=0
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.413224 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerDied","Data":"9b77deecfa6dc37544873cfdd0e2391501b58c93c3b1e18a76ce1248f74e3679"}
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.415204 4661 generic.go:334] "Generic (PLEG): container finished" podID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerID="088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456" exitCode=143
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.415266 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerDied","Data":"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"}
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.415375 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75958fc765-qfvf9"
Oct 01 05:48:51 crc kubenswrapper[4661]: I1001 05:48:51.432942 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" podStartSLOduration=3.432926064 podStartE2EDuration="3.432926064s" podCreationTimestamp="2025-10-01 05:48:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:51.430812846 +0000 UTC m=+1180.368791460" watchObservedRunningTime="2025-10-01 05:48:51.432926064 +0000 UTC m=+1180.370904678"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.414327 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.439720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerStarted","Data":"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596"}
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.474162 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerStarted","Data":"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9"}
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.482462 4661 generic.go:334] "Generic (PLEG): container finished" podID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerID="90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898" exitCode=0
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.483317 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.483744 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerDied","Data":"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"}
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.483762 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43063fe8-a9f2-4dc2-a82d-1111fa60404d","Type":"ContainerDied","Data":"d5cadb2f593454f3742e1e3b0b442f06b5d52f37099d1772bcef07527831fac5"}
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.483777 4661 scope.go:117] "RemoveContainer" containerID="90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517510 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517624 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517689 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517713 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517778 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517823 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517864 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc22r\" (UniqueName: \"kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.517896 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\" (UID: \"43063fe8-a9f2-4dc2-a82d-1111fa60404d\") "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.526329 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.526990 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.527130 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r" (OuterVolumeSpecName: "kube-api-access-wc22r") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "kube-api-access-wc22r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.527278 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs" (OuterVolumeSpecName: "logs") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.527946 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts" (OuterVolumeSpecName: "scripts") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.536436 4661 scope.go:117] "RemoveContainer" containerID="088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.584429 4661 scope.go:117] "RemoveContainer" containerID="90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"
Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.585510 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898\": container with ID starting with 90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898 not found: ID does not exist" containerID="90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.585570 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898"} err="failed to get container status \"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898\": rpc error: code = NotFound desc = could not find container \"90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898\": container with ID starting with 90a40e5ba4cabeaf17cb7d3073eb0cdf3cd7460c92bb1861047fbadfef425898 not found: ID does not exist"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.585597 4661 scope.go:117] "RemoveContainer" containerID="088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"
Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.587003 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456\": container with ID starting with 088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456 not found: ID does not exist" containerID="088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.587028 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456"} err="failed to get container status \"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456\": rpc error: code = NotFound desc = could not find container \"088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456\": container with ID starting with 088a2b6beeab9b3e38f659d4387a6c5b416cf553fbd703c7b1c6a52abfca4456 not found: ID does not exist"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.626924 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.626952 4661 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.626961 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43063fe8-a9f2-4dc2-a82d-1111fa60404d-logs\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.626972 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc22r\" (UniqueName: \"kubernetes.io/projected/43063fe8-a9f2-4dc2-a82d-1111fa60404d-kube-api-access-wc22r\") on node \"crc\" DevicePath \"\""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.627003 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.674509 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.674825 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-httpd" containerID="cri-o://10d091cd3ef24b4abceb825ffef871176a5232bfc9bd93f25c89e39f3b7d43f6" gracePeriod=30
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.675006 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-log" containerID="cri-o://180dbde970bd2d19fcd43b70d2820b5f294807708c272589488401c5539d6066" gracePeriod=30
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.686266 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.686320 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.693108 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.705499 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.725391 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "public-tls-certs".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.732026 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.732057 4661 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.732068 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.738180 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data" (OuterVolumeSpecName: "config-data") pod "43063fe8-a9f2-4dc2-a82d-1111fa60404d" (UID: "43063fe8-a9f2-4dc2-a82d-1111fa60404d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.771000 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.834224 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43063fe8-a9f2-4dc2-a82d-1111fa60404d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.871452 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.879359 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899290 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.899660 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22866a21-7ff0-46cd-8bbe-5a133c012e47" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899678 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="22866a21-7ff0-46cd-8bbe-5a133c012e47" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.899708 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-httpd" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899715 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-httpd" Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.899737 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a138f3b-8b69-490d-9e44-67d9b56247f2" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899743 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a138f3b-8b69-490d-9e44-67d9b56247f2" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.899754 4661 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-log" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899760 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-log" Oct 01 05:48:52 crc kubenswrapper[4661]: E1001 05:48:52.899769 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0168d5b-fbba-4e5d-9efe-556278ffd191" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.899775 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0168d5b-fbba-4e5d-9efe-556278ffd191" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.907829 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="22866a21-7ff0-46cd-8bbe-5a133c012e47" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.907858 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-log" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.907876 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a138f3b-8b69-490d-9e44-67d9b56247f2" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.907885 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" containerName="glance-httpd" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.907902 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0168d5b-fbba-4e5d-9efe-556278ffd191" containerName="mariadb-account-create" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.908902 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.908984 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.910979 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.911144 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943360 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943409 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943445 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943475 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943502 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943520 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmbgn\" (UniqueName: \"kubernetes.io/projected/e0802b8c-f0c0-4210-9618-ed452e52b5a0-kube-api-access-xmbgn\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943559 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:52 crc kubenswrapper[4661]: I1001 05:48:52.943602 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-logs\") pod \"glance-default-external-api-0\" 
(UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044798 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044858 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044889 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044908 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmbgn\" (UniqueName: \"kubernetes.io/projected/e0802b8c-f0c0-4210-9618-ed452e52b5a0-kube-api-access-xmbgn\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044951 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.044995 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-logs\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.045031 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.045052 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.045541 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 
crc kubenswrapper[4661]: I1001 05:48:53.046108 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.046174 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0802b8c-f0c0-4210-9618-ed452e52b5a0-logs\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.067054 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.067458 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.067838 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.068433 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0802b8c-f0c0-4210-9618-ed452e52b5a0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.076300 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmbgn\" (UniqueName: \"kubernetes.io/projected/e0802b8c-f0c0-4210-9618-ed452e52b5a0-kube-api-access-xmbgn\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.095766 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e0802b8c-f0c0-4210-9618-ed452e52b5a0\") " pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.226933 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.512118 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerStarted","Data":"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b"} Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.527258 4661 generic.go:334] "Generic (PLEG): container finished" podID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerID="180dbde970bd2d19fcd43b70d2820b5f294807708c272589488401c5539d6066" exitCode=143 Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.527344 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerDied","Data":"180dbde970bd2d19fcd43b70d2820b5f294807708c272589488401c5539d6066"} Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.529743 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api-log" containerID="cri-o://13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" gracePeriod=30 Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.529885 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerStarted","Data":"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e"} Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.530009 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.529945 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api" containerID="cri-o://70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" gracePeriod=30 Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.542264 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.124848933 podStartE2EDuration="5.542246049s" podCreationTimestamp="2025-10-01 05:48:48 +0000 UTC" firstStartedPulling="2025-10-01 05:48:49.007453264 +0000 UTC m=+1177.945431878" lastFinishedPulling="2025-10-01 05:48:49.42485038 +0000 UTC m=+1178.362828994" observedRunningTime="2025-10-01 05:48:53.538929797 +0000 UTC m=+1182.476908411" watchObservedRunningTime="2025-10-01 05:48:53.542246049 +0000 UTC m=+1182.480224663" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.598114 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.620310 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.620273212 podStartE2EDuration="5.620273212s" podCreationTimestamp="2025-10-01 05:48:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:53.569490537 +0000 UTC m=+1182.507469151" watchObservedRunningTime="2025-10-01 05:48:53.620273212 +0000 UTC m=+1182.558251826" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.703362 4661 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.779958 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43063fe8-a9f2-4dc2-a82d-1111fa60404d" path="/var/lib/kubelet/pods/43063fe8-a9f2-4dc2-a82d-1111fa60404d/volumes" Oct 01 05:48:53 crc kubenswrapper[4661]: I1001 05:48:53.861731 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 05:48:53 crc kubenswrapper[4661]: W1001 05:48:53.873026 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0802b8c_f0c0_4210_9618_ed452e52b5a0.slice/crio-9472e048a8451243cb77696a48e5848e71120ac533e0256d05a6fa5b1b951bf1 WatchSource:0}: Error finding container 9472e048a8451243cb77696a48e5848e71120ac533e0256d05a6fa5b1b951bf1: Status 404 returned error can't find the container with id 9472e048a8451243cb77696a48e5848e71120ac533e0256d05a6fa5b1b951bf1 Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.103726 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.175756 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.175903 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.175929 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.175945 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb8wd\" (UniqueName: \"kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.176003 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.176088 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data\") pod \"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.176138 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id\") pod 
\"161c593c-0263-4ecb-a720-69366d51a827\" (UID: \"161c593c-0263-4ecb-a720-69366d51a827\") " Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.176532 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.181886 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs" (OuterVolumeSpecName: "logs") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.185156 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts" (OuterVolumeSpecName: "scripts") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.187834 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd" (OuterVolumeSpecName: "kube-api-access-tb8wd") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "kube-api-access-tb8wd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.191559 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.256825 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data" (OuterVolumeSpecName: "config-data") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.257829 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "161c593c-0263-4ecb-a720-69366d51a827" (UID: "161c593c-0263-4ecb-a720-69366d51a827"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278543 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278583 4661 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/161c593c-0263-4ecb-a720-69366d51a827-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278597 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278608 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278619 4661 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/161c593c-0263-4ecb-a720-69366d51a827-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278645 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb8wd\" (UniqueName: \"kubernetes.io/projected/161c593c-0263-4ecb-a720-69366d51a827-kube-api-access-tb8wd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.278657 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/161c593c-0263-4ecb-a720-69366d51a827-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.553595 4661 generic.go:334] "Generic (PLEG): container finished" podID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerID="10d091cd3ef24b4abceb825ffef871176a5232bfc9bd93f25c89e39f3b7d43f6" exitCode=0 Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.553688 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerDied","Data":"10d091cd3ef24b4abceb825ffef871176a5232bfc9bd93f25c89e39f3b7d43f6"} Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556483 4661 generic.go:334] "Generic (PLEG): container finished" podID="161c593c-0263-4ecb-a720-69366d51a827" containerID="70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" exitCode=0 Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556513 4661 generic.go:334] "Generic (PLEG): container finished" podID="161c593c-0263-4ecb-a720-69366d51a827" containerID="13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" exitCode=143 Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556563 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerDied","Data":"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e"} Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556639 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerDied","Data":"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596"} Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556653 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"161c593c-0263-4ecb-a720-69366d51a827","Type":"ContainerDied","Data":"d39c4b86922f2aa2018c804beb065f2dc658c372b5841eff6a2254127fa08433"} Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556670 4661 scope.go:117] "RemoveContainer" containerID="70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.556797 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.572108 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0802b8c-f0c0-4210-9618-ed452e52b5a0","Type":"ContainerStarted","Data":"9472e048a8451243cb77696a48e5848e71120ac533e0256d05a6fa5b1b951bf1"} Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.646119 4661 scope.go:117] "RemoveContainer" containerID="13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.647724 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.672026 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.685928 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 01 05:48:54 crc kubenswrapper[4661]: E1001 05:48:54.686356 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api-log" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.686367 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api-log" Oct 01 05:48:54 crc kubenswrapper[4661]: E1001 05:48:54.686390 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.686398 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.686572 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.686599 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="161c593c-0263-4ecb-a720-69366d51a827" containerName="cinder-api-log" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.687592 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.698119 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.698268 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.708832 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.708940 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.708967 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqjh5\" (UniqueName: \"kubernetes.io/projected/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-kube-api-access-hqjh5\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709022 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-scripts\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709091 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-logs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709112 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709161 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709177 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.709239 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.712552 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.714674 4661 scope.go:117] "RemoveContainer" containerID="70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" Oct 01 05:48:54 crc kubenswrapper[4661]: E1001 05:48:54.716939 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e\": container with ID starting with 70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e not found: ID does not exist" containerID="70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.716969 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e"} err="failed to get container status \"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e\": rpc error: code = NotFound desc = could not find container \"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e\": container with ID starting with 70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e not found: ID does not exist" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.716993 4661 scope.go:117] "RemoveContainer" containerID="13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" Oct 01 05:48:54 crc kubenswrapper[4661]: E1001 05:48:54.718328 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596\": container with ID starting with 13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596 not found: ID does not exist" containerID="13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.718374 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596"} err="failed to get container status \"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596\": rpc error: code = NotFound desc = could not find container \"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596\": container with ID starting with 13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596 not found: ID does not exist" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.718391 4661 scope.go:117] "RemoveContainer" containerID="70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.722786 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e"} err="failed to get container status \"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e\": rpc error: code = NotFound desc = could not find container \"70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e\": container with ID starting with 
70d2bd0701c0d416ecad527debf133edb5869359ce40ca82e94655fbc327746e not found: ID does not exist" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.722808 4661 scope.go:117] "RemoveContainer" containerID="13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.725863 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596"} err="failed to get container status \"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596\": rpc error: code = NotFound desc = could not find container \"13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596\": container with ID starting with 13499505993d8bb88d907ba4e335b4727e65fbfbffbaa83acfa35fa1c56b3596 not found: ID does not exist" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.796507 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.813730 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814598 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814692 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814743 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814818 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814839 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqjh5\" (UniqueName: \"kubernetes.io/projected/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-kube-api-access-hqjh5\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814889 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-scripts\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc 
kubenswrapper[4661]: I1001 05:48:54.814930 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-logs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.814949 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.817102 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.824189 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-logs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.826215 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.828450 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.838394 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-scripts\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.840449 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-config-data\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.845849 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.850147 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.877554 4661 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-hqjh5\" (UniqueName: \"kubernetes.io/projected/9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5-kube-api-access-hqjh5\") pod \"cinder-api-0\" (UID: \"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5\") " pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.914470 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.931166 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cqms7"] Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.932449 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.944641 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-k2mc8" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.944937 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.945084 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 05:48:54 crc kubenswrapper[4661]: I1001 05:48:54.978133 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cqms7"] Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.018340 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.018461 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.018498 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.018572 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6lx2\" (UniqueName: \"kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.110791 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.131191 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.131374 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.131583 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6lx2\" (UniqueName: \"kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.131955 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.136305 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.136983 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.141069 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.159436 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6lx2\" (UniqueName: \"kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2\") pod \"nova-cell0-conductor-db-sync-cqms7\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233320 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzntc\" (UniqueName: \"kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" 
(UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233459 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233485 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233547 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233606 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233696 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233753 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.233802 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs\") pod \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\" (UID: \"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf\") " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.234071 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.234386 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs" (OuterVolumeSpecName: "logs") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.234484 4661 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.239782 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc" (OuterVolumeSpecName: "kube-api-access-nzntc") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "kube-api-access-nzntc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.239932 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts" (OuterVolumeSpecName: "scripts") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.252354 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.263366 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.286956 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.336658 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.336685 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.336694 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.336702 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzntc\" (UniqueName: \"kubernetes.io/projected/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-kube-api-access-nzntc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.336723 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.351377 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.394517 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.397726 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data" (OuterVolumeSpecName: "config-data") pod "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" (UID: "f05ecf1b-d793-43c1-a9b1-f83e11a14ecf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.439269 4661 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.439302 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.439312 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.579986 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.599519 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f05ecf1b-d793-43c1-a9b1-f83e11a14ecf","Type":"ContainerDied","Data":"86390f24ef6bb6b2aa07ae581fc2c5de9896a1c202b9469d1429619859335a1f"} Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.599890 4661 scope.go:117] "RemoveContainer" containerID="10d091cd3ef24b4abceb825ffef871176a5232bfc9bd93f25c89e39f3b7d43f6" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.600359 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.611257 4661 generic.go:334] "Generic (PLEG): container finished" podID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerID="0d310a0ac536ef88c6129c7c3dec41ff17d6c4732227fd4a212643d42a37eec7" exitCode=0 Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.611332 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerDied","Data":"0d310a0ac536ef88c6129c7c3dec41ff17d6c4732227fd4a212643d42a37eec7"} Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.614455 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine" containerID="cri-o://71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" gracePeriod=30 Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.614669 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0802b8c-f0c0-4210-9618-ed452e52b5a0","Type":"ContainerStarted","Data":"fbe432950c26b6b2714feca1d343b26e14de43453eab169db4c6016836816409"} Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.798192 4661 scope.go:117] "RemoveContainer" containerID="180dbde970bd2d19fcd43b70d2820b5f294807708c272589488401c5539d6066" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.818870 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="161c593c-0263-4ecb-a720-69366d51a827" path="/var/lib/kubelet/pods/161c593c-0263-4ecb-a720-69366d51a827/volumes" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.852835 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 
05:48:55.881494 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.906448 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:48:55 crc kubenswrapper[4661]: E1001 05:48:55.906923 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-httpd" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.906939 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-httpd" Oct 01 05:48:55 crc kubenswrapper[4661]: E1001 05:48:55.906957 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-log" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.906963 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-log" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.907176 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-httpd" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.907192 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" containerName="glance-log" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.908283 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.914333 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.917599 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cqms7"] Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.926110 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 01 05:48:55 crc kubenswrapper[4661]: I1001 05:48:55.950678 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.048830 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072725 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv2vs\" (UniqueName: \"kubernetes.io/projected/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-kube-api-access-qv2vs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072778 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072806 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072829 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072860 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072901 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.072964 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.073017 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-logs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.175074 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs\") pod 
\"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.175721 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2rrn\" (UniqueName: \"kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn\") pod \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.175740 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config\") pod \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.175796 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle\") pod \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.175833 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config\") pod \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\" (UID: \"fb9d74e6-1011-457e-aa3a-a46bf22a7af9\") " Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176108 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176166 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-logs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176204 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv2vs\" (UniqueName: \"kubernetes.io/projected/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-kube-api-access-qv2vs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176265 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176284 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176303 4661 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176322 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.176358 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.180529 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.181284 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-logs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.185242 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.186574 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "fb9d74e6-1011-457e-aa3a-a46bf22a7af9" (UID: "fb9d74e6-1011-457e-aa3a-a46bf22a7af9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.186831 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.189477 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn" (OuterVolumeSpecName: "kube-api-access-z2rrn") pod "fb9d74e6-1011-457e-aa3a-a46bf22a7af9" (UID: "fb9d74e6-1011-457e-aa3a-a46bf22a7af9"). InnerVolumeSpecName "kube-api-access-z2rrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.190869 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.191148 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.198848 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.222461 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv2vs\" (UniqueName: \"kubernetes.io/projected/6aa66ead-9f31-4644-a6ba-b3f6ddb82c64-kube-api-access-qv2vs\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.224047 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64\") " pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.249676 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb9d74e6-1011-457e-aa3a-a46bf22a7af9" (UID: "fb9d74e6-1011-457e-aa3a-a46bf22a7af9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.277697 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.277731 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2rrn\" (UniqueName: \"kubernetes.io/projected/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-kube-api-access-z2rrn\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.277745 4661 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.291834 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "fb9d74e6-1011-457e-aa3a-a46bf22a7af9" (UID: "fb9d74e6-1011-457e-aa3a-a46bf22a7af9"). 
InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.304794 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config" (OuterVolumeSpecName: "config") pod "fb9d74e6-1011-457e-aa3a-a46bf22a7af9" (UID: "fb9d74e6-1011-457e-aa3a-a46bf22a7af9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.344361 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.379616 4661 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.379661 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fb9d74e6-1011-457e-aa3a-a46bf22a7af9-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.662056 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cdff47b98-krjm2" event={"ID":"fb9d74e6-1011-457e-aa3a-a46bf22a7af9","Type":"ContainerDied","Data":"035798de8b667530a600cf2be0db57721609b5ef28a3404f90ddc6a9f4b04e0e"} Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.662530 4661 scope.go:117] "RemoveContainer" containerID="9b77deecfa6dc37544873cfdd0e2391501b58c93c3b1e18a76ce1248f74e3679" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.662081 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cdff47b98-krjm2" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.678564 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cqms7" event={"ID":"198fb851-8eef-40d8-9074-997436cc6373","Type":"ContainerStarted","Data":"8f9d487b6043b2c27c6c5b96a3458b9d50aa402f12cc63e2844eeca6df681888"} Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.682028 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5","Type":"ContainerStarted","Data":"83961cb671d66947b07cf666cfcdd85e0c51df8c382306d9230cc1a0a1ed02a6"} Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.682120 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5","Type":"ContainerStarted","Data":"449fba3acd667cb3ff1d37bfe4522598065cdd907336d5dbe978973eda3cf20a"} Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.686922 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0802b8c-f0c0-4210-9618-ed452e52b5a0","Type":"ContainerStarted","Data":"ed18b7ab4b25fb4d8d91ad46ab8e97117afa83148449b68650dbae3130bd89cf"} Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.743739 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.743722525 podStartE2EDuration="4.743722525s" podCreationTimestamp="2025-10-01 05:48:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:56.712370104 +0000 UTC m=+1185.650348728" watchObservedRunningTime="2025-10-01 05:48:56.743722525 +0000 UTC m=+1185.681701139" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.754935 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"] Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.755983 4661 scope.go:117] "RemoveContainer" containerID="0d310a0ac536ef88c6129c7c3dec41ff17d6c4732227fd4a212643d42a37eec7" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.763671 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5cdff47b98-krjm2"] Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.915784 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.916769 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="proxy-httpd" containerID="cri-o://8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708" gracePeriod=30 Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.916871 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="sg-core" containerID="cri-o://dfc3c198ca949bf63825c762f4853db619eca77644921c6a42f19fabad290419" gracePeriod=30 Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.916884 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-notification-agent" containerID="cri-o://3bc71c128935810138bfd2a17cd2b83202647bf4cd29a94298f45ee032f425b0" 
gracePeriod=30 Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.916497 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-central-agent" containerID="cri-o://0bfb35e257792c466c1e295e92f7a10a2b68a9df84fc4411291b08723c40f014" gracePeriod=30 Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.930783 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 05:48:56 crc kubenswrapper[4661]: I1001 05:48:56.935537 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 05:48:57 crc kubenswrapper[4661]: E1001 05:48:57.336338 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf517e9fb_0e28_44c3_9d30_e97ec854be99.slice/crio-8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf517e9fb_0e28_44c3_9d30_e97ec854be99.slice/crio-conmon-8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fdca12_5e6d_43d7_ae59_33b3a388ada4.slice/crio-7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9\": RecentStats: unable to find data in memory cache]" Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703253 4661 generic.go:334] "Generic (PLEG): container finished" podID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerID="8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708" exitCode=0 Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703525 4661 generic.go:334] "Generic (PLEG): container finished" podID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerID="dfc3c198ca949bf63825c762f4853db619eca77644921c6a42f19fabad290419" exitCode=2 Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703404 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerDied","Data":"8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708"} Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703823 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerDied","Data":"dfc3c198ca949bf63825c762f4853db619eca77644921c6a42f19fabad290419"} Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703842 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerDied","Data":"0bfb35e257792c466c1e295e92f7a10a2b68a9df84fc4411291b08723c40f014"} Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.703536 4661 generic.go:334] "Generic (PLEG): container finished" podID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerID="0bfb35e257792c466c1e295e92f7a10a2b68a9df84fc4411291b08723c40f014" exitCode=0 Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.708218 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5","Type":"ContainerStarted","Data":"1c5d19e13d4456f57eb470751b48f35c595f5fc1859841f977afd8d309905250"} 
Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.708294 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.711265 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64","Type":"ContainerStarted","Data":"c0219299587e7bcadbf400659c441bbc3c37394a2a1882bf1f189c2b3ae308ea"} Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.711304 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64","Type":"ContainerStarted","Data":"d938b613a2db402507b1e7f8f29fe396954a342d4a11fa45c1aeb4dae511260c"} Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.738058 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.73804224 podStartE2EDuration="3.73804224s" podCreationTimestamp="2025-10-01 05:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:57.735945243 +0000 UTC m=+1186.673923847" watchObservedRunningTime="2025-10-01 05:48:57.73804224 +0000 UTC m=+1186.676020854" Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.816238 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f05ecf1b-d793-43c1-a9b1-f83e11a14ecf" path="/var/lib/kubelet/pods/f05ecf1b-d793-43c1-a9b1-f83e11a14ecf/volumes" Oct 01 05:48:57 crc kubenswrapper[4661]: I1001 05:48:57.818369 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" path="/var/lib/kubelet/pods/fb9d74e6-1011-457e-aa3a-a46bf22a7af9/volumes" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.388477 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.472872 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.526723 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.526940 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="dnsmasq-dns" containerID="cri-o://b34bfd783b18cf29a24a92500d0351aaacd9eac86fcf98629b596e31b9f4b411" gracePeriod=10 Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.615312 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.734203 4661 generic.go:334] "Generic (PLEG): container finished" podID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerID="b34bfd783b18cf29a24a92500d0351aaacd9eac86fcf98629b596e31b9f4b411" exitCode=0 Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.734270 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" event={"ID":"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb","Type":"ContainerDied","Data":"b34bfd783b18cf29a24a92500d0351aaacd9eac86fcf98629b596e31b9f4b411"} Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.737263 4661 generic.go:334] 
"Generic (PLEG): container finished" podID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerID="3bc71c128935810138bfd2a17cd2b83202647bf4cd29a94298f45ee032f425b0" exitCode=0 Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.737315 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerDied","Data":"3bc71c128935810138bfd2a17cd2b83202647bf4cd29a94298f45ee032f425b0"} Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.737336 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f517e9fb-0e28-44c3-9d30-e97ec854be99","Type":"ContainerDied","Data":"e716c2f34481a0fb315b5fa7b2ef92c38826914714c8aa5be8c6a057871d9bb3"} Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.737347 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e716c2f34481a0fb315b5fa7b2ef92c38826914714c8aa5be8c6a057871d9bb3" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.755830 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6aa66ead-9f31-4644-a6ba-b3f6ddb82c64","Type":"ContainerStarted","Data":"ea0c8c700560b5fa45037bd5eaa2724344c85a6002801a6b00601a1bd111e6b7"} Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.787623 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.815210 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.815188981 podStartE2EDuration="3.815188981s" podCreationTimestamp="2025-10-01 05:48:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:48:58.792118557 +0000 UTC m=+1187.730097171" watchObservedRunningTime="2025-10-01 05:48:58.815188981 +0000 UTC m=+1187.753167595" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.864732 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.945835 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kmd2\" (UniqueName: \"kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.946891 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.949548 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.949761 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: 
\"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.949807 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.950993 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.949850 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.951174 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle\") pod \"f517e9fb-0e28-44c3-9d30-e97ec854be99\" (UID: \"f517e9fb-0e28-44c3-9d30-e97ec854be99\") " Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.954433 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.955788 4661 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.955810 4661 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f517e9fb-0e28-44c3-9d30-e97ec854be99-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.957414 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2" (OuterVolumeSpecName: "kube-api-access-9kmd2") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "kube-api-access-9kmd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:58 crc kubenswrapper[4661]: I1001 05:48:58.983885 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts" (OuterVolumeSpecName: "scripts") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.047456 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.058261 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kmd2\" (UniqueName: \"kubernetes.io/projected/f517e9fb-0e28-44c3-9d30-e97ec854be99-kube-api-access-9kmd2\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.058310 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.058322 4661 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.078498 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.150804 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data" (OuterVolumeSpecName: "config-data") pod "f517e9fb-0e28-44c3-9d30-e97ec854be99" (UID: "f517e9fb-0e28-44c3-9d30-e97ec854be99"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.153304 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.159746 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.159778 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f517e9fb-0e28-44c3-9d30-e97ec854be99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.260955 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.261023 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.261080 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4nxh\" (UniqueName: \"kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.261158 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.261523 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.261565 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0\") pod \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\" (UID: \"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb\") " Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.272176 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh" (OuterVolumeSpecName: "kube-api-access-h4nxh") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "kube-api-access-h4nxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.316237 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config" (OuterVolumeSpecName: "config") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.325915 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.338602 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.345231 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.359455 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" (UID: "97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364512 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364547 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364557 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364566 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364575 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4nxh\" (UniqueName: \"kubernetes.io/projected/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-kube-api-access-h4nxh\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.364586 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.770166 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.770243 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.771465 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-rr6w9" event={"ID":"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb","Type":"ContainerDied","Data":"d907e361b5510b0131635085b8665463bc9ee969920335c30278d97929311611"} Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.771502 4661 scope.go:117] "RemoveContainer" containerID="b34bfd783b18cf29a24a92500d0351aaacd9eac86fcf98629b596e31b9f4b411" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.772110 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="probe" containerID="cri-o://a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b" gracePeriod=30 Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.772107 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="cinder-scheduler" containerID="cri-o://0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9" gracePeriod=30 Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.815108 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.830174 4661 scope.go:117] "RemoveContainer" containerID="b5a86118cc91b4cddaf468a76cefebbf371ca1f9bf85f5fb75cc64f20ff68571" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.868070 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-rr6w9"] Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.876062 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.883918 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.891524 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.891954 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="dnsmasq-dns" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.891966 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="dnsmasq-dns" Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.891984 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="proxy-httpd" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.891991 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="proxy-httpd" Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892008 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-api" Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892013 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-api" Oct 01 
05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892027 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="sg-core"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892033 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="sg-core"
Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892049 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-httpd"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892054 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-httpd"
Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892092 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-central-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892098 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-central-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892125 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="init"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892132 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="init"
Oct 01 05:48:59 crc kubenswrapper[4661]: E1001 05:48:59.892146 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-notification-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892151 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-notification-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892313 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="proxy-httpd"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892329 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-notification-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892342 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-api"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892350 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" containerName="dnsmasq-dns"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892362 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="ceilometer-central-agent"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892371 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" containerName="sg-core"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.892380 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9d74e6-1011-457e-aa3a-a46bf22a7af9" containerName="neutron-httpd"
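The cpu_manager.go:410, state_mem.go:107, and memory_manager.go:354 entries above show the kubelet dropping leftover per-container resource-manager state, keyed by podUID plus containerName, for pods that no longer exist before it admits the replacement ceilometer-0. A minimal Go sketch of that bookkeeping, with hypothetical type and function names rather than the kubelet's actual ones:

```go
package main

import "fmt"

// staleKey mirrors how the log identifies state: podUID plus containerName.
type staleKey struct {
	PodUID        string
	ContainerName string
}

// stateStore is a stand-in for the resource-manager checkpoints
// (the CPUSet assignments and memory-manager state seen in the log).
type stateStore map[staleKey]string

// removeStaleState drops every entry whose pod UID is no longer active,
// logging one line per removal, as the entries above do.
func removeStaleState(s stateStore, activePods map[string]bool) {
	for k := range s {
		if !activePods[k.PodUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.PodUID, k.ContainerName)
			delete(s, k)
		}
	}
}

func main() {
	s := stateStore{
		{"f517e9fb-0e28-44c3-9d30-e97ec854be99", "sg-core"}: "cpuset=0-1",
		{"97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb", "init"}:    "cpuset=2",
	}
	// Neither pod UID is active any more, so both entries are dropped.
	removeStaleState(s, map[string]bool{})
	fmt.Println("remaining entries:", len(s))
}
```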
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.894055 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.896786 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.896973 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.899024 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974676 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974719 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974799 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974827 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974860 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974889 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:48:59 crc kubenswrapper[4661]: I1001 05:48:59.974910 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xll24\" (UniqueName: \"kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.075963 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0"
Oct 01 05:49:00 crc kubenswrapper[4661]: I1001
05:49:00.076007 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.076079 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.076106 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.076135 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.076161 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.076184 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xll24\" (UniqueName: \"kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.077430 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.077829 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.081044 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.081273 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.082324 4661 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.087754 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.098571 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xll24\" (UniqueName: \"kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24\") pod \"ceilometer-0\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.238050 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.706763 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:00 crc kubenswrapper[4661]: W1001 05:49:00.711162 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb958c7b7_f355_4af9_a33d_e95e2f1b5e02.slice/crio-3430fa1d3ad628adb3b2c694b35f15f076f459d81f40750410aca3c5c1fccdd9 WatchSource:0}: Error finding container 3430fa1d3ad628adb3b2c694b35f15f076f459d81f40750410aca3c5c1fccdd9: Status 404 returned error can't find the container with id 3430fa1d3ad628adb3b2c694b35f15f076f459d81f40750410aca3c5c1fccdd9 Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.785284 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerStarted","Data":"3430fa1d3ad628adb3b2c694b35f15f076f459d81f40750410aca3c5c1fccdd9"} Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.796845 4661 generic.go:334] "Generic (PLEG): container finished" podID="0297f772-f42e-4830-a2f8-485658085c9d" containerID="a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b" exitCode=0 Oct 01 05:49:00 crc kubenswrapper[4661]: I1001 05:49:00.796886 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerDied","Data":"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b"} Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.544104 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.601881 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.601944 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602021 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602165 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkqpq\" (UniqueName: \"kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602224 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602252 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602305 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts\") pod \"0297f772-f42e-4830-a2f8-485658085c9d\" (UID: \"0297f772-f42e-4830-a2f8-485658085c9d\") " Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.602740 4661 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0297f772-f42e-4830-a2f8-485658085c9d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.606049 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts" (OuterVolumeSpecName: "scripts") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.607827 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq" (OuterVolumeSpecName: "kube-api-access-hkqpq") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "kube-api-access-hkqpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.615622 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.683977 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.706240 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkqpq\" (UniqueName: \"kubernetes.io/projected/0297f772-f42e-4830-a2f8-485658085c9d-kube-api-access-hkqpq\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.706275 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.706284 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.706294 4661 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.727770 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data" (OuterVolumeSpecName: "config-data") pod "0297f772-f42e-4830-a2f8-485658085c9d" (UID: "0297f772-f42e-4830-a2f8-485658085c9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.768414 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb" path="/var/lib/kubelet/pods/97dd6a7e-6da4-4f86-bf48-4bb9166cc5fb/volumes" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.769125 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f517e9fb-0e28-44c3-9d30-e97ec854be99" path="/var/lib/kubelet/pods/f517e9fb-0e28-44c3-9d30-e97ec854be99/volumes" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.807648 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0297f772-f42e-4830-a2f8-485658085c9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.812451 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerStarted","Data":"06b958966968ecde7f58e4712d5ad6e6c29db511fc718dbafc8ab1806e02f6d9"} Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.812501 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerStarted","Data":"5d9d5191af7fdafaa6245701c19b5ccfd388e2d24756de94ba1c423e74929569"} Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.815105 4661 generic.go:334] "Generic (PLEG): container finished" podID="0297f772-f42e-4830-a2f8-485658085c9d" containerID="0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9" exitCode=0 Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.815168 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerDied","Data":"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9"} Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.815220 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0297f772-f42e-4830-a2f8-485658085c9d","Type":"ContainerDied","Data":"b69be7efa0171fa77e93f7e86e5b77ea675d71c856e86edc8f074ae6762e764d"} Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.815238 4661 scope.go:117] "RemoveContainer" containerID="a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.815395 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.861943 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.880864 4661 scope.go:117] "RemoveContainer" containerID="0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.883228 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.892201 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:49:01 crc kubenswrapper[4661]: E1001 05:49:01.893187 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="cinder-scheduler" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.893206 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="cinder-scheduler" Oct 01 05:49:01 crc kubenswrapper[4661]: E1001 05:49:01.893232 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="probe" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.893239 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="probe" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.893439 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="cinder-scheduler" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.893456 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0297f772-f42e-4830-a2f8-485658085c9d" containerName="probe" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.909812 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.909925 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.913919 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.914017 4661 scope.go:117] "RemoveContainer" containerID="a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b" Oct 01 05:49:01 crc kubenswrapper[4661]: E1001 05:49:01.914520 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b\": container with ID starting with a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b not found: ID does not exist" containerID="a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.914679 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b"} err="failed to get container status \"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b\": rpc error: code = NotFound desc = could not find container \"a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b\": container with ID starting with a579a2fcc1a0b43bf463c9b1471cecab4b672c10055c942b0d1bb323880b7f9b not found: ID does not exist" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.914839 4661 scope.go:117] "RemoveContainer" containerID="0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9" Oct 01 05:49:01 crc kubenswrapper[4661]: E1001 05:49:01.915292 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9\": container with ID starting with 0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9 not found: ID does not exist" containerID="0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.915395 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9"} err="failed to get container status \"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9\": rpc error: code = NotFound desc = could not find container \"0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9\": container with ID starting with 0e1c6deddb815d681287ddd78ab168264fd087387cc2fba212478e83f980d5d9 not found: ID does not exist" Oct 01 05:49:01 crc kubenswrapper[4661]: I1001 05:49:01.991312 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.010879 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmj9b\" (UniqueName: \"kubernetes.io/projected/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-kube-api-access-jmj9b\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.011164 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.011226 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-scripts\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.011301 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.011325 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.011348 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.112621 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.112685 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.112714 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.112755 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmj9b\" (UniqueName: \"kubernetes.io/projected/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-kube-api-access-jmj9b\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.112775 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 
05:49:02.112827 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-scripts\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.113149 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.117163 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-scripts\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.117267 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.117618 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.120444 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-config-data\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.132059 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmj9b\" (UniqueName: \"kubernetes.io/projected/29f0e9e2-bbd8-4459-bc99-db5d742a37b8-kube-api-access-jmj9b\") pod \"cinder-scheduler-0\" (UID: \"29f0e9e2-bbd8-4459-bc99-db5d742a37b8\") " pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.230892 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 05:49:02 crc kubenswrapper[4661]: I1001 05:49:02.847937 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerStarted","Data":"39deb66fa20a1c8728718fceded44a3ebf2246723a2d1469b6d2e9a36888ff14"} Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.228246 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.228315 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.280429 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.309081 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.768982 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0297f772-f42e-4830-a2f8-485658085c9d" path="/var/lib/kubelet/pods/0297f772-f42e-4830-a2f8-485658085c9d/volumes" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.868716 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 05:49:03 crc kubenswrapper[4661]: I1001 05:49:03.869810 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 05:49:05 crc kubenswrapper[4661]: I1001 05:49:05.632986 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 05:49:05 crc kubenswrapper[4661]: I1001 05:49:05.633334 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.344833 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.344875 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.383059 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.401533 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.900404 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:06 crc kubenswrapper[4661]: I1001 05:49:06.900990 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:07 crc kubenswrapper[4661]: I1001 05:49:07.154400 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 01 05:49:07 crc kubenswrapper[4661]: E1001 05:49:07.578562 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fdca12_5e6d_43d7_ae59_33b3a388ada4.slice/crio-7bd34b4a10d518e048f69aa9fbf1af80c2025a202af069f0593233df27c325d9\": RecentStats: unable to find data in memory cache]" Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.152057 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.800414 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.934728 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.939788 4661 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.939812 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cqms7" event={"ID":"198fb851-8eef-40d8-9074-997436cc6373","Type":"ContainerStarted","Data":"4b0965c75d4b3b8a906510553dbda0fd498f559e06c8b1b188d7dc7be8f56b59"} Oct 01 05:49:08 crc kubenswrapper[4661]: W1001 05:49:08.946110 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29f0e9e2_bbd8_4459_bc99_db5d742a37b8.slice/crio-d10a849202c405d708fee4dc1d23efcddc1629e466dc183f616dfa5e63ee5d28 WatchSource:0}: Error finding container d10a849202c405d708fee4dc1d23efcddc1629e466dc183f616dfa5e63ee5d28: Status 404 returned error can't find the container with id d10a849202c405d708fee4dc1d23efcddc1629e466dc183f616dfa5e63ee5d28 Oct 01 05:49:08 crc kubenswrapper[4661]: I1001 05:49:08.963503 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-cqms7" podStartSLOduration=2.413112603 podStartE2EDuration="14.963251026s" podCreationTimestamp="2025-10-01 05:48:54 +0000 UTC" firstStartedPulling="2025-10-01 05:48:55.913450747 +0000 UTC m=+1184.851429361" lastFinishedPulling="2025-10-01 05:49:08.46358917 +0000 UTC m=+1197.401567784" observedRunningTime="2025-10-01 05:49:08.95720079 +0000 UTC m=+1197.895179414" watchObservedRunningTime="2025-10-01 05:49:08.963251026 +0000 UTC m=+1197.901229650" Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.118255 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.950805 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29f0e9e2-bbd8-4459-bc99-db5d742a37b8","Type":"ContainerStarted","Data":"eebb0342af30fd38432fb6b0d9d2bcd90e121302d725f243eb260625344428f7"} Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.951078 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29f0e9e2-bbd8-4459-bc99-db5d742a37b8","Type":"ContainerStarted","Data":"d10a849202c405d708fee4dc1d23efcddc1629e466dc183f616dfa5e63ee5d28"} Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954463 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerStarted","Data":"746a444b21dd8734930ba3638b27a9498bfdd3e0349ecc8873415f9f0e3e7c1a"} Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954821 4661 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954794 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-central-agent" containerID="cri-o://5d9d5191af7fdafaa6245701c19b5ccfd388e2d24756de94ba1c423e74929569" gracePeriod=30 Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954845 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="proxy-httpd" containerID="cri-o://746a444b21dd8734930ba3638b27a9498bfdd3e0349ecc8873415f9f0e3e7c1a" gracePeriod=30 Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954900 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-notification-agent" containerID="cri-o://06b958966968ecde7f58e4712d5ad6e6c29db511fc718dbafc8ab1806e02f6d9" gracePeriod=30 Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.954912 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="sg-core" containerID="cri-o://39deb66fa20a1c8728718fceded44a3ebf2246723a2d1469b6d2e9a36888ff14" gracePeriod=30 Oct 01 05:49:09 crc kubenswrapper[4661]: I1001 05:49:09.980696 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.23879368 podStartE2EDuration="10.980676966s" podCreationTimestamp="2025-10-01 05:48:59 +0000 UTC" firstStartedPulling="2025-10-01 05:49:00.714478976 +0000 UTC m=+1189.652457590" lastFinishedPulling="2025-10-01 05:49:09.456362262 +0000 UTC m=+1198.394340876" observedRunningTime="2025-10-01 05:49:09.975917995 +0000 UTC m=+1198.913896609" watchObservedRunningTime="2025-10-01 05:49:09.980676966 +0000 UTC m=+1198.918655590" Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 05:49:10.963908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29f0e9e2-bbd8-4459-bc99-db5d742a37b8","Type":"ContainerStarted","Data":"c7d77167e4f5a495ca03004f6e074ca481cc7bfb5a6948bd59a8286edf743638"} Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 05:49:10.967260 4661 generic.go:334] "Generic (PLEG): container finished" podID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerID="39deb66fa20a1c8728718fceded44a3ebf2246723a2d1469b6d2e9a36888ff14" exitCode=2 Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 05:49:10.967306 4661 generic.go:334] "Generic (PLEG): container finished" podID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerID="5d9d5191af7fdafaa6245701c19b5ccfd388e2d24756de94ba1c423e74929569" exitCode=0 Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 05:49:10.967325 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerDied","Data":"39deb66fa20a1c8728718fceded44a3ebf2246723a2d1469b6d2e9a36888ff14"} Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 05:49:10.967355 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerDied","Data":"5d9d5191af7fdafaa6245701c19b5ccfd388e2d24756de94ba1c423e74929569"} Oct 01 05:49:10 crc kubenswrapper[4661]: I1001 
05:49:10.984424 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=9.984403879 podStartE2EDuration="9.984403879s" podCreationTimestamp="2025-10-01 05:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:10.9819037 +0000 UTC m=+1199.919882324" watchObservedRunningTime="2025-10-01 05:49:10.984403879 +0000 UTC m=+1199.922382493"
Oct 01 05:49:12 crc kubenswrapper[4661]: I1001 05:49:12.231960 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 01 05:49:14 crc kubenswrapper[4661]: I1001 05:49:14.007160 4661 generic.go:334] "Generic (PLEG): container finished" podID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerID="06b958966968ecde7f58e4712d5ad6e6c29db511fc718dbafc8ab1806e02f6d9" exitCode=0
Oct 01 05:49:14 crc kubenswrapper[4661]: I1001 05:49:14.007250 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerDied","Data":"06b958966968ecde7f58e4712d5ad6e6c29db511fc718dbafc8ab1806e02f6d9"}
Oct 01 05:49:17 crc kubenswrapper[4661]: I1001 05:49:17.415400 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 01 05:49:22 crc kubenswrapper[4661]: E1001 05:49:22.681894 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"]
Oct 01 05:49:22 crc kubenswrapper[4661]: E1001 05:49:22.683504 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"]
Oct 01 05:49:22 crc kubenswrapper[4661]: E1001 05:49:22.686612 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"]
Oct 01 05:49:22 crc kubenswrapper[4661]: E1001 05:49:22.686825 4661 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/watcher-decision-engine-0" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:23 crc kubenswrapper[4661]: I1001 05:49:23.124877 4661 generic.go:334] "Generic (PLEG): container finished" podID="198fb851-8eef-40d8-9074-997436cc6373" containerID="4b0965c75d4b3b8a906510553dbda0fd498f559e06c8b1b188d7dc7be8f56b59" exitCode=0
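The three ExecSync failures above are a single readiness probe, cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"], racing the container's shutdown: the runtime refuses to register a new exec PID in a stopping container, so prober.go records "Probe errored" rather than an ordinary unhealthy result. The probe itself is just a pgrep gate; a hedged Go sketch of the same check, assuming procps-ng pgrep, where -r DRST matches only processes in run states D, R, S, or T (alive and not a zombie) and exit status 1 means no match:

```go
package main

import (
	"errors"
	"fmt"
	"os/exec"
)

// probeOnce runs the same command the log shows the runtime exec'ing.
func probeOnce(pattern string) (ready bool, err error) {
	cmd := exec.Command("/usr/bin/pgrep", "-f", "-r", "DRST", pattern)
	runErr := cmd.Run()
	if runErr == nil {
		return true, nil // exit 0: at least one live matching process
	}
	var ee *exec.ExitError
	if errors.As(runErr, &ee) && ee.ExitCode() == 1 {
		return false, nil // exit 1: no matching process; probe fails cleanly
	}
	// Anything else (for instance the exec never running because the
	// container is stopping, surfacing as "exit code -1" in the log)
	// is a probe *error*, reported separately from an unhealthy result.
	return false, runErr
}

func main() {
	ready, err := probeOnce("watcher-decision-engine")
	fmt.Println("ready:", ready, "err:", err)
}
```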
event={"ID":"198fb851-8eef-40d8-9074-997436cc6373","Type":"ContainerDied","Data":"4b0965c75d4b3b8a906510553dbda0fd498f559e06c8b1b188d7dc7be8f56b59"} Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.674453 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.820970 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts\") pod \"198fb851-8eef-40d8-9074-997436cc6373\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.821064 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6lx2\" (UniqueName: \"kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2\") pod \"198fb851-8eef-40d8-9074-997436cc6373\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.821141 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data\") pod \"198fb851-8eef-40d8-9074-997436cc6373\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.821209 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle\") pod \"198fb851-8eef-40d8-9074-997436cc6373\" (UID: \"198fb851-8eef-40d8-9074-997436cc6373\") " Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.844191 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2" (OuterVolumeSpecName: "kube-api-access-w6lx2") pod "198fb851-8eef-40d8-9074-997436cc6373" (UID: "198fb851-8eef-40d8-9074-997436cc6373"). InnerVolumeSpecName "kube-api-access-w6lx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.844669 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts" (OuterVolumeSpecName: "scripts") pod "198fb851-8eef-40d8-9074-997436cc6373" (UID: "198fb851-8eef-40d8-9074-997436cc6373"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.874612 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data" (OuterVolumeSpecName: "config-data") pod "198fb851-8eef-40d8-9074-997436cc6373" (UID: "198fb851-8eef-40d8-9074-997436cc6373"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.875895 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "198fb851-8eef-40d8-9074-997436cc6373" (UID: "198fb851-8eef-40d8-9074-997436cc6373"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.923585 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.923612 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.923624 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6lx2\" (UniqueName: \"kubernetes.io/projected/198fb851-8eef-40d8-9074-997436cc6373-kube-api-access-w6lx2\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:24 crc kubenswrapper[4661]: I1001 05:49:24.923678 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198fb851-8eef-40d8-9074-997436cc6373-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.157530 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cqms7" event={"ID":"198fb851-8eef-40d8-9074-997436cc6373","Type":"ContainerDied","Data":"8f9d487b6043b2c27c6c5b96a3458b9d50aa402f12cc63e2844eeca6df681888"} Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.157607 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f9d487b6043b2c27c6c5b96a3458b9d50aa402f12cc63e2844eeca6df681888" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.157661 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cqms7" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.312115 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 05:49:25 crc kubenswrapper[4661]: E1001 05:49:25.312836 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="198fb851-8eef-40d8-9074-997436cc6373" containerName="nova-cell0-conductor-db-sync" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.312865 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="198fb851-8eef-40d8-9074-997436cc6373" containerName="nova-cell0-conductor-db-sync" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.313327 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="198fb851-8eef-40d8-9074-997436cc6373" containerName="nova-cell0-conductor-db-sync" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.314389 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.316368 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.316999 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-k2mc8" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.326627 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.436367 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.436681 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.436875 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n4ls\" (UniqueName: \"kubernetes.io/projected/2652d904-fc8d-4fe1-8980-896266d64eec-kube-api-access-5n4ls\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.538517 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.539787 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.539978 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n4ls\" (UniqueName: \"kubernetes.io/projected/2652d904-fc8d-4fe1-8980-896266d64eec-kube-api-access-5n4ls\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.548530 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.550652 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2652d904-fc8d-4fe1-8980-896266d64eec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.574111 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n4ls\" (UniqueName: \"kubernetes.io/projected/2652d904-fc8d-4fe1-8980-896266d64eec-kube-api-access-5n4ls\") pod \"nova-cell0-conductor-0\" (UID: \"2652d904-fc8d-4fe1-8980-896266d64eec\") " pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:25 crc kubenswrapper[4661]: I1001 05:49:25.643448 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.138010 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.169702 4661 generic.go:334] "Generic (PLEG): container finished" podID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" exitCode=137 Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.169996 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerDied","Data":"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61"} Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.170021 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"87a9e404-beb1-4f1d-a7a2-188ccdacbb81","Type":"ContainerDied","Data":"ec5d81f979af53b5613a1941f4065a44125ce4715a01d8c6645f59740b91964c"} Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.170039 4661 scope.go:117] "RemoveContainer" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.170148 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.206734 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.239672 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.264001 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b7jj\" (UniqueName: \"kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj\") pod \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.264141 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data\") pod \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.264193 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca\") pod \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.264215 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs\") pod \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.264328 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle\") pod \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\" (UID: \"87a9e404-beb1-4f1d-a7a2-188ccdacbb81\") " Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.265345 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs" (OuterVolumeSpecName: "logs") pod "87a9e404-beb1-4f1d-a7a2-188ccdacbb81" (UID: "87a9e404-beb1-4f1d-a7a2-188ccdacbb81"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.266716 4661 scope.go:117] "RemoveContainer" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" Oct 01 05:49:26 crc kubenswrapper[4661]: E1001 05:49:26.268961 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61\": container with ID starting with 71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61 not found: ID does not exist" containerID="71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.269006 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61"} err="failed to get container status \"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61\": rpc error: code = NotFound desc = could not find container \"71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61\": container with ID starting with 71cf4296b224f8f5ca458af2508a68d347ed20572d1386a3d28c38271e2d2e61 not found: ID does not exist" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.269034 4661 scope.go:117] "RemoveContainer" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:49:26 crc kubenswrapper[4661]: E1001 05:49:26.269348 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74\": container with ID starting with 264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74 not found: ID does not exist" containerID="264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.269382 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74"} err="failed to get container status \"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74\": rpc error: code = NotFound desc = could not find container \"264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74\": container with ID starting with 264f7e328e4ac57c670183a8834d2477fa48b96eef460bdc4889eab4cd54ba74 not found: ID does not exist" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.270307 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj" (OuterVolumeSpecName: "kube-api-access-8b7jj") pod "87a9e404-beb1-4f1d-a7a2-188ccdacbb81" (UID: "87a9e404-beb1-4f1d-a7a2-188ccdacbb81"). InnerVolumeSpecName "kube-api-access-8b7jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.310417 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87a9e404-beb1-4f1d-a7a2-188ccdacbb81" (UID: "87a9e404-beb1-4f1d-a7a2-188ccdacbb81"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.312518 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "87a9e404-beb1-4f1d-a7a2-188ccdacbb81" (UID: "87a9e404-beb1-4f1d-a7a2-188ccdacbb81"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.339774 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data" (OuterVolumeSpecName: "config-data") pod "87a9e404-beb1-4f1d-a7a2-188ccdacbb81" (UID: "87a9e404-beb1-4f1d-a7a2-188ccdacbb81"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.366338 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.366377 4661 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.366388 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.366397 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.366407 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b7jj\" (UniqueName: \"kubernetes.io/projected/87a9e404-beb1-4f1d-a7a2-188ccdacbb81-kube-api-access-8b7jj\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.500535 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.509488 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520278 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Oct 01 05:49:26 crc kubenswrapper[4661]: E1001 05:49:26.520707 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520726 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine" Oct 01 05:49:26 crc kubenswrapper[4661]: E1001 05:49:26.520768 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine" Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520775 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" 
Oct 01 05:49:26 crc kubenswrapper[4661]: E1001 05:49:26.520784 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520789 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520957 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520976 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.520988 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.521596 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.525170 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.530464 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.671499 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.671993 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-logs\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.672203 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.672300 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnxmb\" (UniqueName: \"kubernetes.io/projected/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-kube-api-access-rnxmb\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.672562 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.774484 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.774577 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.774681 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-logs\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.774787 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.774836 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnxmb\" (UniqueName: \"kubernetes.io/projected/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-kube-api-access-rnxmb\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.776840 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-logs\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.792573 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.800825 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.800941 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.803501 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnxmb\" (UniqueName: \"kubernetes.io/projected/7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a-kube-api-access-rnxmb\") pod \"watcher-decision-engine-0\" (UID: \"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a\") " pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:26 crc kubenswrapper[4661]: I1001 05:49:26.840237 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.178613 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2652d904-fc8d-4fe1-8980-896266d64eec","Type":"ContainerStarted","Data":"ad653912c8d5f4fb90f61e6e75b0a727af4a34311e400015a5957db8cd1c1fe5"}
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.178931 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2652d904-fc8d-4fe1-8980-896266d64eec","Type":"ContainerStarted","Data":"336a2185bd65d240495672bacf24c940147641ebc4bab0366bcfa84fdb69d175"}
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.180074 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.200714 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.200696974 podStartE2EDuration="2.200696974s" podCreationTimestamp="2025-10-01 05:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:27.200411175 +0000 UTC m=+1216.138389799" watchObservedRunningTime="2025-10-01 05:49:27.200696974 +0000 UTC m=+1216.138675588"
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.476733 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Oct 01 05:49:27 crc kubenswrapper[4661]: I1001 05:49:27.776139 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" path="/var/lib/kubelet/pods/87a9e404-beb1-4f1d-a7a2-188ccdacbb81/volumes"
Oct 01 05:49:28 crc kubenswrapper[4661]: I1001 05:49:28.195571 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a","Type":"ContainerStarted","Data":"9b788770e92343234de8ef0997945542864ea69fb114372b0dba2e2f75f6614c"}
Oct 01 05:49:28 crc kubenswrapper[4661]: I1001 05:49:28.195676 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a","Type":"ContainerStarted","Data":"8eeb7b614b86f091f43695539e59da29fd0dac6619c629819bda93e09f5de827"}
Oct 01 05:49:28 crc kubenswrapper[4661]: I1001 05:49:28.229709 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.229685591 podStartE2EDuration="2.229685591s" podCreationTimestamp="2025-10-01 05:49:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:28.222504093 +0000 UTC m=+1217.160482727" watchObservedRunningTime="2025-10-01 05:49:28.229685591 +0000 UTC m=+1217.167664235"
Oct 01 05:49:30 crc kubenswrapper[4661]: I1001 05:49:30.247122 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Oct 01 05:49:35 crc kubenswrapper[4661]: I1001 05:49:35.692915 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.282785 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-csgws"]
Oct 01 05:49:36 crc kubenswrapper[4661]: E1001 05:49:36.283493 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.283514 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.286800 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="87a9e404-beb1-4f1d-a7a2-188ccdacbb81" containerName="watcher-decision-engine"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.287580 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.292230 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.292517 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.308324 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7n2m\" (UniqueName: \"kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.308932 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.308969 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.309053 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.324047 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-csgws"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.410613 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.410786 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7n2m\" (UniqueName: \"kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.410901 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.410934 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.419651 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.423105 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.440296 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.440727 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7n2m\" (UniqueName: \"kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.443100 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.457024 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.466190 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-csgws\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.500748 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.513411 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.513488 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.513516 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8qlw\" (UniqueName: \"kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.513545 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.527114 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.528476 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.532379 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.539535 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.573234 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.582287 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625508 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625564 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625620 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625657 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9hg2\" (UniqueName: \"kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625696 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt2fx\" (UniqueName: \"kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625832 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625880 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8qlw\" (UniqueName: \"kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625939 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.625967 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.626048 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.626701 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.643232 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.654916 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.657924 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-csgws"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.662224 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8qlw\" (UniqueName: \"kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.667496 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " pod="openstack/nova-api-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.675124 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727659 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727738 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727797 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0"
Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727813 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0"
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727835 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9hg2\" (UniqueName: \"kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.727854 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt2fx\" (UniqueName: \"kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.732729 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.734519 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.738426 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.744109 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.748097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt2fx\" (UniqueName: \"kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.749579 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.755246 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.756337 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.756457 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc 
kubenswrapper[4661]: I1001 05:49:36.764256 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9hg2\" (UniqueName: \"kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2\") pod \"nova-scheduler-0\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.778442 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.782438 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.785460 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.826039 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.841001 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.880902 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.895899 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.923921 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.932754 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.932917 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.932953 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933079 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh782\" (UniqueName: \"kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933097 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsvvr\" (UniqueName: 
\"kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933362 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933395 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933419 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-config\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933476 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:36 crc kubenswrapper[4661]: I1001 05:49:36.933496 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.036773 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037018 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037045 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-config\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037115 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: 
\"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037140 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037195 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037218 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037241 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037270 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh782\" (UniqueName: \"kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.037291 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsvvr\" (UniqueName: \"kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.038212 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.044548 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.045234 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.046010 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.046100 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.048379 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.051140 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.057585 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsvvr\" (UniqueName: \"kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr\") pod \"dnsmasq-dns-844fc57f6f-n2bs5\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.061205 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh782\" (UniqueName: \"kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782\") pod \"nova-metadata-0\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " pod="openstack/nova-metadata-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.115147 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.133383 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.218854 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-csgws"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.389850 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-csgws" event={"ID":"2410d057-e29d-4a70-bc09-c692348e5018","Type":"ContainerStarted","Data":"05ea510cc9e3b6b955d7018005f7209df9985c9cb61bbecbef94c19d359e5b86"}
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.390795 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.422525 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.506659 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.527604 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.546801 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.672952 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5r72j"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.675022 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5r72j"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.679150 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.679337 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.697261 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5r72j"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.734822 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.762262 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.762324 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j"
Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.762347 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j"
\"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.762492 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8228f\" (UniqueName: \"kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.823212 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.864549 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.864623 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.864670 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.864942 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8228f\" (UniqueName: \"kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.871111 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.871223 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.872885 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:37 crc kubenswrapper[4661]: I1001 05:49:37.880859 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8228f\" (UniqueName: \"kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f\") pod \"nova-cell1-conductor-db-sync-5r72j\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.007845 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.421364 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-csgws" event={"ID":"2410d057-e29d-4a70-bc09-c692348e5018","Type":"ContainerStarted","Data":"de75f6f845cb37f06c30281e0964a1e75c66d66508d4747e00fc45a8219876bf"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.443526 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-csgws" podStartSLOduration=2.4435079330000002 podStartE2EDuration="2.443507933s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:38.440912661 +0000 UTC m=+1227.378891275" watchObservedRunningTime="2025-10-01 05:49:38.443507933 +0000 UTC m=+1227.381486547" Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.466211 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8070a6cb-4bc1-4376-818a-6b99d638166d","Type":"ContainerStarted","Data":"578562b9818c82a954da299d679a7319ae8f51f7d854d4ca5a4b1405fcc936ee"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.468880 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerStarted","Data":"33873b0008bac5ac1bf34f34d781db512317e540d325f061dfbac9cd0e76206f"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.471329 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"232f0a78-781e-48d2-a8ca-aab89e6cb3c8","Type":"ContainerStarted","Data":"995acb19c4a1d10226effed4dd08f47567a4d2a00590414aea48a99d8ba3ccdd"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.472404 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerStarted","Data":"b0b1455b38828f0f12d23967389eac35190f540d7a06019e5f23b8f92daf05f5"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.473853 4661 generic.go:334] "Generic (PLEG): container finished" podID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerID="9947c1cd36302c585eff1272a6e9eefeae64444a87cd392f9cc453b44c3b3034" exitCode=0 Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.474557 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" event={"ID":"24db082c-81fd-427d-a0b0-57b64adc6f73","Type":"ContainerDied","Data":"9947c1cd36302c585eff1272a6e9eefeae64444a87cd392f9cc453b44c3b3034"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.474619 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" event={"ID":"24db082c-81fd-427d-a0b0-57b64adc6f73","Type":"ContainerStarted","Data":"fb52f75915c4578a9472e87a4ac9f4bb6df56379ec98ea5554f0d00343e5340a"} Oct 01 05:49:38 crc kubenswrapper[4661]: I1001 05:49:38.506451 
4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5r72j"] Oct 01 05:49:39 crc kubenswrapper[4661]: I1001 05:49:39.493102 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5r72j" event={"ID":"bf9833ec-089c-4b0c-94b6-f04b7d7773ae","Type":"ContainerStarted","Data":"fd51bc2fe4385ae4608492f71ca2d73d3771aed6ea0b5733b3fb66e2879a0aef"} Oct 01 05:49:40 crc kubenswrapper[4661]: I1001 05:49:40.477433 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:49:40 crc kubenswrapper[4661]: I1001 05:49:40.496461 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:49:40 crc kubenswrapper[4661]: I1001 05:49:40.507190 4661 generic.go:334] "Generic (PLEG): container finished" podID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerID="746a444b21dd8734930ba3638b27a9498bfdd3e0349ecc8873415f9f0e3e7c1a" exitCode=137 Oct 01 05:49:40 crc kubenswrapper[4661]: I1001 05:49:40.507252 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerDied","Data":"746a444b21dd8734930ba3638b27a9498bfdd3e0349ecc8873415f9f0e3e7c1a"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.068854 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089606 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089670 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089722 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089747 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089811 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089836 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " 
Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.089875 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xll24\" (UniqueName: \"kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24\") pod \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\" (UID: \"b958c7b7-f355-4af9-a33d-e95e2f1b5e02\") " Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.092393 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.093403 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.108248 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24" (OuterVolumeSpecName: "kube-api-access-xll24") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "kube-api-access-xll24". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.112223 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts" (OuterVolumeSpecName: "scripts") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.191647 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xll24\" (UniqueName: \"kubernetes.io/projected/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-kube-api-access-xll24\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.192201 4661 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.192383 4661 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.192398 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.264487 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.294191 4661 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.342213 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.396412 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.437484 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data" (OuterVolumeSpecName: "config-data") pod "b958c7b7-f355-4af9-a33d-e95e2f1b5e02" (UID: "b958c7b7-f355-4af9-a33d-e95e2f1b5e02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.497234 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b958c7b7-f355-4af9-a33d-e95e2f1b5e02-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.538534 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8070a6cb-4bc1-4376-818a-6b99d638166d","Type":"ContainerStarted","Data":"be0b2e70ed01ce29b794479aff4bdebc48a7399c396df1948b7415a936598030"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.538733 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8070a6cb-4bc1-4376-818a-6b99d638166d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://be0b2e70ed01ce29b794479aff4bdebc48a7399c396df1948b7415a936598030" gracePeriod=30 Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.562697 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerStarted","Data":"fde785b55e762d033fa631fb6e4669eb7db71f7fbd0a9040850349258c6feadc"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.563063 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerStarted","Data":"d775f51e44aa55626189c97e6f609f9ed50b2d49938eae6f84affe455c0d4e34"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.563252 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-log" containerID="cri-o://d775f51e44aa55626189c97e6f609f9ed50b2d49938eae6f84affe455c0d4e34" gracePeriod=30 Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.563683 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" 
podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-metadata" containerID="cri-o://fde785b55e762d033fa631fb6e4669eb7db71f7fbd0a9040850349258c6feadc" gracePeriod=30 Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.575149 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"232f0a78-781e-48d2-a8ca-aab89e6cb3c8","Type":"ContainerStarted","Data":"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.585934 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5r72j" event={"ID":"bf9833ec-089c-4b0c-94b6-f04b7d7773ae","Type":"ContainerStarted","Data":"9beb3854a87bf01f3aa6b629be3ef358e7047a4f1547a8fb1ac2418072620c06"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.588056 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerStarted","Data":"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.598814 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerStarted","Data":"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.598840 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.598852 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" event={"ID":"24db082c-81fd-427d-a0b0-57b64adc6f73","Type":"ContainerStarted","Data":"319037cd7406a790025665fc0577d14227b3809f7fdca66bc36b95d28ac24dd3"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.607709 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.36047473 podStartE2EDuration="5.607690554s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="2025-10-01 05:49:37.559744575 +0000 UTC m=+1226.497723189" lastFinishedPulling="2025-10-01 05:49:40.806960389 +0000 UTC m=+1229.744939013" observedRunningTime="2025-10-01 05:49:41.562863824 +0000 UTC m=+1230.500842448" watchObservedRunningTime="2025-10-01 05:49:41.607690554 +0000 UTC m=+1230.545669168" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.628808 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b958c7b7-f355-4af9-a33d-e95e2f1b5e02","Type":"ContainerDied","Data":"3430fa1d3ad628adb3b2c694b35f15f076f459d81f40750410aca3c5c1fccdd9"} Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.628856 4661 scope.go:117] "RemoveContainer" containerID="746a444b21dd8734930ba3638b27a9498bfdd3e0349ecc8873415f9f0e3e7c1a" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.629025 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.665278 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.609955949 podStartE2EDuration="5.665260291s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="2025-10-01 05:49:37.751653467 +0000 UTC m=+1226.689632071" lastFinishedPulling="2025-10-01 05:49:40.806957799 +0000 UTC m=+1229.744936413" observedRunningTime="2025-10-01 05:49:41.588092631 +0000 UTC m=+1230.526071245" watchObservedRunningTime="2025-10-01 05:49:41.665260291 +0000 UTC m=+1230.603238905" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.681212 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-5r72j" podStartSLOduration=4.681194555 podStartE2EDuration="4.681194555s" podCreationTimestamp="2025-10-01 05:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:41.628423389 +0000 UTC m=+1230.566402003" watchObservedRunningTime="2025-10-01 05:49:41.681194555 +0000 UTC m=+1230.619173169" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.696748 4661 scope.go:117] "RemoveContainer" containerID="39deb66fa20a1c8728718fceded44a3ebf2246723a2d1469b6d2e9a36888ff14" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.721494 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.370810985 podStartE2EDuration="5.721475721s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="2025-10-01 05:49:37.456267162 +0000 UTC m=+1226.394245776" lastFinishedPulling="2025-10-01 05:49:40.806931888 +0000 UTC m=+1229.744910512" observedRunningTime="2025-10-01 05:49:41.648455384 +0000 UTC m=+1230.586433988" watchObservedRunningTime="2025-10-01 05:49:41.721475721 +0000 UTC m=+1230.659454335" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.762995 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" podStartSLOduration=5.762979781 podStartE2EDuration="5.762979781s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:41.670002241 +0000 UTC m=+1230.607980855" watchObservedRunningTime="2025-10-01 05:49:41.762979781 +0000 UTC m=+1230.700958395" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.783511 4661 scope.go:117] "RemoveContainer" containerID="06b958966968ecde7f58e4712d5ad6e6c29db511fc718dbafc8ab1806e02f6d9" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.787845 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.4978270670000002 podStartE2EDuration="5.787826857s" podCreationTimestamp="2025-10-01 05:49:36 +0000 UTC" firstStartedPulling="2025-10-01 05:49:37.525801982 +0000 UTC m=+1226.463780596" lastFinishedPulling="2025-10-01 05:49:40.815801762 +0000 UTC m=+1229.753780386" observedRunningTime="2025-10-01 05:49:41.688508594 +0000 UTC m=+1230.626487208" watchObservedRunningTime="2025-10-01 05:49:41.787826857 +0000 UTC m=+1230.725805471" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.795897 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.807241 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.833000 4661 scope.go:117] "RemoveContainer" containerID="5d9d5191af7fdafaa6245701c19b5ccfd388e2d24756de94ba1c423e74929569" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.842139 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.873939 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:41 crc kubenswrapper[4661]: E1001 05:49:41.874434 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="proxy-httpd" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874453 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="proxy-httpd" Oct 01 05:49:41 crc kubenswrapper[4661]: E1001 05:49:41.874468 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-central-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874474 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-central-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: E1001 05:49:41.874505 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="sg-core" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874512 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="sg-core" Oct 01 05:49:41 crc kubenswrapper[4661]: E1001 05:49:41.874525 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-notification-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874531 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-notification-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874751 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-central-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874765 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="sg-core" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874774 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="ceilometer-notification-agent" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.874793 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" containerName="proxy-httpd" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.876693 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.880496 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.880647 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.888822 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910119 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910164 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910213 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910276 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910295 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910321 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.910393 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k2bm\" (UniqueName: \"kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:41 crc kubenswrapper[4661]: I1001 05:49:41.924085 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.011926 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts\") 
pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012001 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012035 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012111 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k2bm\" (UniqueName: \"kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012141 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012173 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012229 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012811 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.012835 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.017129 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.017957 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") 
" pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.019469 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.019926 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.036146 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k2bm\" (UniqueName: \"kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm\") pod \"ceilometer-0\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.115771 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.116203 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.199232 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.651544 4661 generic.go:334] "Generic (PLEG): container finished" podID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerID="d775f51e44aa55626189c97e6f609f9ed50b2d49938eae6f84affe455c0d4e34" exitCode=143 Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.651682 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerDied","Data":"d775f51e44aa55626189c97e6f609f9ed50b2d49938eae6f84affe455c0d4e34"} Oct 01 05:49:42 crc kubenswrapper[4661]: I1001 05:49:42.711051 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:49:43 crc kubenswrapper[4661]: I1001 05:49:43.699308 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerStarted","Data":"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269"} Oct 01 05:49:43 crc kubenswrapper[4661]: I1001 05:49:43.699730 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerStarted","Data":"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5"} Oct 01 05:49:43 crc kubenswrapper[4661]: I1001 05:49:43.699745 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerStarted","Data":"e1096d6a659dd8c9c7e0f5f1e0abe589e5881a8f67a4b16cfd2a618843cc3a31"} Oct 01 05:49:43 crc kubenswrapper[4661]: I1001 05:49:43.772422 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b958c7b7-f355-4af9-a33d-e95e2f1b5e02" path="/var/lib/kubelet/pods/b958c7b7-f355-4af9-a33d-e95e2f1b5e02/volumes" Oct 01 05:49:44 crc kubenswrapper[4661]: I1001 05:49:44.709527 4661 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerStarted","Data":"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318"} Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.738814 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerStarted","Data":"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b"} Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.739230 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.742752 4661 generic.go:334] "Generic (PLEG): container finished" podID="2410d057-e29d-4a70-bc09-c692348e5018" containerID="de75f6f845cb37f06c30281e0964a1e75c66d66508d4747e00fc45a8219876bf" exitCode=0 Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.742820 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-csgws" event={"ID":"2410d057-e29d-4a70-bc09-c692348e5018","Type":"ContainerDied","Data":"de75f6f845cb37f06c30281e0964a1e75c66d66508d4747e00fc45a8219876bf"} Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.776326 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.796189403 podStartE2EDuration="5.776299536s" podCreationTimestamp="2025-10-01 05:49:41 +0000 UTC" firstStartedPulling="2025-10-01 05:49:42.72641336 +0000 UTC m=+1231.664391974" lastFinishedPulling="2025-10-01 05:49:45.706523493 +0000 UTC m=+1234.644502107" observedRunningTime="2025-10-01 05:49:46.76504322 +0000 UTC m=+1235.703021844" watchObservedRunningTime="2025-10-01 05:49:46.776299536 +0000 UTC m=+1235.714278180" Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.779395 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.827257 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.897108 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:49:46 crc kubenswrapper[4661]: I1001 05:49:46.897173 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.134890 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.198860 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"] Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.199093 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="dnsmasq-dns" containerID="cri-o://6408279bd738f019edcdba2778cb9ff3401c2c27993c82fa50c6f8b1e4f860d5" gracePeriod=10 Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.769334 4661 generic.go:334] "Generic (PLEG): container finished" podID="c221754f-65eb-4272-b438-d2d8591c3645" containerID="6408279bd738f019edcdba2778cb9ff3401c2c27993c82fa50c6f8b1e4f860d5" exitCode=0 Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.770656 4661 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerDied","Data":"6408279bd738f019edcdba2778cb9ff3401c2c27993c82fa50c6f8b1e4f860d5"} Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.817838 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.877219 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.892836 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.892892 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.893845 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.893975 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.894303 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.894353 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gz9l\" (UniqueName: \"kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l\") pod \"c221754f-65eb-4272-b438-d2d8591c3645\" (UID: \"c221754f-65eb-4272-b438-d2d8591c3645\") " Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.915794 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l" (OuterVolumeSpecName: "kube-api-access-4gz9l") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "kube-api-access-4gz9l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.984787 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.985017 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 05:49:47 crc kubenswrapper[4661]: I1001 05:49:47.995595 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gz9l\" (UniqueName: \"kubernetes.io/projected/c221754f-65eb-4272-b438-d2d8591c3645-kube-api-access-4gz9l\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.002374 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.008625 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.018686 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.034076 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.040763 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config" (OuterVolumeSpecName: "config") pod "c221754f-65eb-4272-b438-d2d8591c3645" (UID: "c221754f-65eb-4272-b438-d2d8591c3645"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.096771 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.096798 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.096808 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.096821 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.096906 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c221754f-65eb-4272-b438-d2d8591c3645-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.106103 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-csgws" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.198278 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7n2m\" (UniqueName: \"kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m\") pod \"2410d057-e29d-4a70-bc09-c692348e5018\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.198671 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data\") pod \"2410d057-e29d-4a70-bc09-c692348e5018\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.198723 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts\") pod \"2410d057-e29d-4a70-bc09-c692348e5018\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.198789 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle\") pod \"2410d057-e29d-4a70-bc09-c692348e5018\" (UID: \"2410d057-e29d-4a70-bc09-c692348e5018\") " Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.207876 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts" (OuterVolumeSpecName: "scripts") pod "2410d057-e29d-4a70-bc09-c692348e5018" (UID: "2410d057-e29d-4a70-bc09-c692348e5018"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.209858 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m" (OuterVolumeSpecName: "kube-api-access-j7n2m") pod "2410d057-e29d-4a70-bc09-c692348e5018" (UID: "2410d057-e29d-4a70-bc09-c692348e5018"). InnerVolumeSpecName "kube-api-access-j7n2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.238875 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data" (OuterVolumeSpecName: "config-data") pod "2410d057-e29d-4a70-bc09-c692348e5018" (UID: "2410d057-e29d-4a70-bc09-c692348e5018"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.255719 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2410d057-e29d-4a70-bc09-c692348e5018" (UID: "2410d057-e29d-4a70-bc09-c692348e5018"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.300422 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.300454 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7n2m\" (UniqueName: \"kubernetes.io/projected/2410d057-e29d-4a70-bc09-c692348e5018-kube-api-access-j7n2m\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.300465 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.300474 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2410d057-e29d-4a70-bc09-c692348e5018-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.793954 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-csgws" event={"ID":"2410d057-e29d-4a70-bc09-c692348e5018","Type":"ContainerDied","Data":"05ea510cc9e3b6b955d7018005f7209df9985c9cb61bbecbef94c19d359e5b86"} Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.795736 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05ea510cc9e3b6b955d7018005f7209df9985c9cb61bbecbef94c19d359e5b86" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.794168 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-csgws" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.796694 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" event={"ID":"c221754f-65eb-4272-b438-d2d8591c3645","Type":"ContainerDied","Data":"586b8fb1c3e5a8cf38a7020216e9ed271340bc7b6680dc771e6efcaa6a46c189"} Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.796717 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-qfvf9" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.796754 4661 scope.go:117] "RemoveContainer" containerID="6408279bd738f019edcdba2778cb9ff3401c2c27993c82fa50c6f8b1e4f860d5" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.845513 4661 scope.go:117] "RemoveContainer" containerID="1efd1538ecb45e0543f9800d5d97592018eb1699c5cb24ba2c5ef1aa4c511481" Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.855384 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"] Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.866119 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-qfvf9"] Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.982383 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.982989 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-log" containerID="cri-o://c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e" gracePeriod=30 Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.983434 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-api" containerID="cri-o://ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51" gracePeriod=30 Oct 01 05:49:48 crc kubenswrapper[4661]: I1001 05:49:48.993792 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:49 crc kubenswrapper[4661]: I1001 05:49:49.772728 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c221754f-65eb-4272-b438-d2d8591c3645" path="/var/lib/kubelet/pods/c221754f-65eb-4272-b438-d2d8591c3645/volumes" Oct 01 05:49:49 crc kubenswrapper[4661]: I1001 05:49:49.815204 4661 generic.go:334] "Generic (PLEG): container finished" podID="badb9016-4549-4b7d-841d-873ad822a442" containerID="c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e" exitCode=143 Oct 01 05:49:49 crc kubenswrapper[4661]: I1001 05:49:49.815328 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerDied","Data":"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e"} Oct 01 05:49:49 crc kubenswrapper[4661]: I1001 05:49:49.815378 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerName="nova-scheduler-scheduler" containerID="cri-o://62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" gracePeriod=30 Oct 01 05:49:50 crc kubenswrapper[4661]: I1001 05:49:50.843714 4661 generic.go:334] "Generic (PLEG): container finished" 
podID="bf9833ec-089c-4b0c-94b6-f04b7d7773ae" containerID="9beb3854a87bf01f3aa6b629be3ef358e7047a4f1547a8fb1ac2418072620c06" exitCode=0 Oct 01 05:49:50 crc kubenswrapper[4661]: I1001 05:49:50.844520 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5r72j" event={"ID":"bf9833ec-089c-4b0c-94b6-f04b7d7773ae","Type":"ContainerDied","Data":"9beb3854a87bf01f3aa6b629be3ef358e7047a4f1547a8fb1ac2418072620c06"} Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.668592 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.766096 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs\") pod \"badb9016-4549-4b7d-841d-873ad822a442\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.766378 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data\") pod \"badb9016-4549-4b7d-841d-873ad822a442\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.766447 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle\") pod \"badb9016-4549-4b7d-841d-873ad822a442\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.766492 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8qlw\" (UniqueName: \"kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw\") pod \"badb9016-4549-4b7d-841d-873ad822a442\" (UID: \"badb9016-4549-4b7d-841d-873ad822a442\") " Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.767604 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs" (OuterVolumeSpecName: "logs") pod "badb9016-4549-4b7d-841d-873ad822a442" (UID: "badb9016-4549-4b7d-841d-873ad822a442"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.780928 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw" (OuterVolumeSpecName: "kube-api-access-w8qlw") pod "badb9016-4549-4b7d-841d-873ad822a442" (UID: "badb9016-4549-4b7d-841d-873ad822a442"). InnerVolumeSpecName "kube-api-access-w8qlw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.784246 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.789178 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.791256 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.791303 4661 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerName="nova-scheduler-scheduler" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.816447 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data" (OuterVolumeSpecName: "config-data") pod "badb9016-4549-4b7d-841d-873ad822a442" (UID: "badb9016-4549-4b7d-841d-873ad822a442"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.826659 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "badb9016-4549-4b7d-841d-873ad822a442" (UID: "badb9016-4549-4b7d-841d-873ad822a442"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.863106 4661 generic.go:334] "Generic (PLEG): container finished" podID="badb9016-4549-4b7d-841d-873ad822a442" containerID="ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51" exitCode=0 Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.863370 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.872577 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerDied","Data":"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51"} Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.872647 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"badb9016-4549-4b7d-841d-873ad822a442","Type":"ContainerDied","Data":"b0b1455b38828f0f12d23967389eac35190f540d7a06019e5f23b8f92daf05f5"} Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.872671 4661 scope.go:117] "RemoveContainer" containerID="ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.875812 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badb9016-4549-4b7d-841d-873ad822a442-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.875858 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.875880 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badb9016-4549-4b7d-841d-873ad822a442-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.875902 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8qlw\" (UniqueName: \"kubernetes.io/projected/badb9016-4549-4b7d-841d-873ad822a442-kube-api-access-w8qlw\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.926930 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.930915 4661 scope.go:117] "RemoveContainer" containerID="c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.943390 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.957698 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.958072 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="dnsmasq-dns" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958085 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="dnsmasq-dns" Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.958100 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2410d057-e29d-4a70-bc09-c692348e5018" containerName="nova-manage" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958106 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2410d057-e29d-4a70-bc09-c692348e5018" containerName="nova-manage" Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.958129 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="init" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958136 4661 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="init" Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.958149 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-api" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958155 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-api" Oct 01 05:49:51 crc kubenswrapper[4661]: E1001 05:49:51.958169 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-log" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958174 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-log" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958336 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-log" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958350 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="badb9016-4549-4b7d-841d-873ad822a442" containerName="nova-api-api" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958359 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2410d057-e29d-4a70-bc09-c692348e5018" containerName="nova-manage" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.958368 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c221754f-65eb-4272-b438-d2d8591c3645" containerName="dnsmasq-dns" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.959305 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.959391 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.975739 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.977454 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.977493 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.977611 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9wj8\" (UniqueName: \"kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:51 crc kubenswrapper[4661]: I1001 05:49:51.977662 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.005523 4661 scope.go:117] "RemoveContainer" containerID="ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51" Oct 01 05:49:52 crc kubenswrapper[4661]: E1001 05:49:52.006050 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51\": container with ID starting with ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51 not found: ID does not exist" containerID="ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.006090 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51"} err="failed to get container status \"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51\": rpc error: code = NotFound desc = could not find container \"ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51\": container with ID starting with ccf0025c22161c065f6c2fc88202515ee12bdd1cb7725913e23f2b34e8fe3b51 not found: ID does not exist" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.006119 4661 scope.go:117] "RemoveContainer" containerID="c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e" Oct 01 05:49:52 crc kubenswrapper[4661]: E1001 05:49:52.006416 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e\": container with ID starting with c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e not found: ID does not exist" 
containerID="c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.006454 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e"} err="failed to get container status \"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e\": rpc error: code = NotFound desc = could not find container \"c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e\": container with ID starting with c7bbd6241afdb0b9684bc00137095f0112300402f35af83250a567cdab8ed27e not found: ID does not exist" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.078961 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.079011 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.079100 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9wj8\" (UniqueName: \"kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.079136 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.079813 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.082825 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.084298 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.100259 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9wj8\" (UniqueName: \"kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8\") pod \"nova-api-0\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.281584 4661 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.282327 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8228f\" (UniqueName: \"kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f\") pod \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.282414 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data\") pod \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.289004 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f" (OuterVolumeSpecName: "kube-api-access-8228f") pod "bf9833ec-089c-4b0c-94b6-f04b7d7773ae" (UID: "bf9833ec-089c-4b0c-94b6-f04b7d7773ae"). InnerVolumeSpecName "kube-api-access-8228f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.290406 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.351949 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data" (OuterVolumeSpecName: "config-data") pod "bf9833ec-089c-4b0c-94b6-f04b7d7773ae" (UID: "bf9833ec-089c-4b0c-94b6-f04b7d7773ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.383861 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts\") pod \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.383959 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle\") pod \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\" (UID: \"bf9833ec-089c-4b0c-94b6-f04b7d7773ae\") " Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.384650 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.384680 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8228f\" (UniqueName: \"kubernetes.io/projected/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-kube-api-access-8228f\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.387303 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts" (OuterVolumeSpecName: "scripts") pod "bf9833ec-089c-4b0c-94b6-f04b7d7773ae" (UID: "bf9833ec-089c-4b0c-94b6-f04b7d7773ae"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.426244 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf9833ec-089c-4b0c-94b6-f04b7d7773ae" (UID: "bf9833ec-089c-4b0c-94b6-f04b7d7773ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.487186 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.487223 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9833ec-089c-4b0c-94b6-f04b7d7773ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.789366 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.877660 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5r72j" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.877628 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5r72j" event={"ID":"bf9833ec-089c-4b0c-94b6-f04b7d7773ae","Type":"ContainerDied","Data":"fd51bc2fe4385ae4608492f71ca2d73d3771aed6ea0b5733b3fb66e2879a0aef"} Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.877893 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd51bc2fe4385ae4608492f71ca2d73d3771aed6ea0b5733b3fb66e2879a0aef" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.881252 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerStarted","Data":"398ae6c5bf984759eea7e9707daa4ee3fc338861039e22b559f4096b07c3d305"} Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.977695 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 05:49:52 crc kubenswrapper[4661]: E1001 05:49:52.978285 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf9833ec-089c-4b0c-94b6-f04b7d7773ae" containerName="nova-cell1-conductor-db-sync" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.978310 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf9833ec-089c-4b0c-94b6-f04b7d7773ae" containerName="nova-cell1-conductor-db-sync" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.978545 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf9833ec-089c-4b0c-94b6-f04b7d7773ae" containerName="nova-cell1-conductor-db-sync" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.979425 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:52 crc kubenswrapper[4661]: I1001 05:49:52.981857 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.004106 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.097896 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.098610 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.099394 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m7ms\" (UniqueName: \"kubernetes.io/projected/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-kube-api-access-7m7ms\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.201647 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.202603 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m7ms\" (UniqueName: \"kubernetes.io/projected/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-kube-api-access-7m7ms\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.202847 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.207812 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.207894 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.225622 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m7ms\" (UniqueName: \"kubernetes.io/projected/12ce0bf4-4fb7-44da-87d4-9592ef8848a1-kube-api-access-7m7ms\") pod \"nova-cell1-conductor-0\" (UID: \"12ce0bf4-4fb7-44da-87d4-9592ef8848a1\") " pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.308158 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.755888 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.773691 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="badb9016-4549-4b7d-841d-873ad822a442" path="/var/lib/kubelet/pods/badb9016-4549-4b7d-841d-873ad822a442/volumes" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.894096 4661 generic.go:334] "Generic (PLEG): container finished" podID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" exitCode=0 Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.894162 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"232f0a78-781e-48d2-a8ca-aab89e6cb3c8","Type":"ContainerDied","Data":"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5"} Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.894196 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"232f0a78-781e-48d2-a8ca-aab89e6cb3c8","Type":"ContainerDied","Data":"995acb19c4a1d10226effed4dd08f47567a4d2a00590414aea48a99d8ba3ccdd"} Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.894207 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.894217 4661 scope.go:117] "RemoveContainer" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.896352 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerStarted","Data":"f644c4edec6f071f3da775605d3e9aa59a604852e5c060fb9ad74c1831d36452"} Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.896378 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerStarted","Data":"3e9c0c7b85876020250a0825dbb4c583fbe1197339921e7d2e6dd3156ebe7149"} Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.903735 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 05:49:53 crc kubenswrapper[4661]: W1001 05:49:53.905987 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12ce0bf4_4fb7_44da_87d4_9592ef8848a1.slice/crio-b71ff6e4153753630486f4cf4dc92012b692837938d349150f56ac6a4d77f96b WatchSource:0}: Error finding container b71ff6e4153753630486f4cf4dc92012b692837938d349150f56ac6a4d77f96b: Status 404 returned error can't find the container with id b71ff6e4153753630486f4cf4dc92012b692837938d349150f56ac6a4d77f96b Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.916580 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9hg2\" (UniqueName: \"kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2\") pod \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.916818 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data\") pod \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.916880 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle\") pod \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\" (UID: \"232f0a78-781e-48d2-a8ca-aab89e6cb3c8\") " Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.917425 4661 scope.go:117] "RemoveContainer" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" Oct 01 05:49:53 crc kubenswrapper[4661]: E1001 05:49:53.917994 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5\": container with ID starting with 62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5 not found: ID does not exist" containerID="62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.918049 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5"} err="failed to get container status 
\"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5\": rpc error: code = NotFound desc = could not find container \"62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5\": container with ID starting with 62e20d27697a31d2ec91ee3cd5379262fb201b6ffb98a6b34fea475c522749d5 not found: ID does not exist" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.927095 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.927077042 podStartE2EDuration="2.927077042s" podCreationTimestamp="2025-10-01 05:49:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:53.923533476 +0000 UTC m=+1242.861512100" watchObservedRunningTime="2025-10-01 05:49:53.927077042 +0000 UTC m=+1242.865055666" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.927743 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2" (OuterVolumeSpecName: "kube-api-access-q9hg2") pod "232f0a78-781e-48d2-a8ca-aab89e6cb3c8" (UID: "232f0a78-781e-48d2-a8ca-aab89e6cb3c8"). InnerVolumeSpecName "kube-api-access-q9hg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.947744 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "232f0a78-781e-48d2-a8ca-aab89e6cb3c8" (UID: "232f0a78-781e-48d2-a8ca-aab89e6cb3c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:53 crc kubenswrapper[4661]: I1001 05:49:53.963832 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data" (OuterVolumeSpecName: "config-data") pod "232f0a78-781e-48d2-a8ca-aab89e6cb3c8" (UID: "232f0a78-781e-48d2-a8ca-aab89e6cb3c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.019896 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9hg2\" (UniqueName: \"kubernetes.io/projected/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-kube-api-access-q9hg2\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.020040 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.020120 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/232f0a78-781e-48d2-a8ca-aab89e6cb3c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.309469 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.325931 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.340276 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:54 crc kubenswrapper[4661]: E1001 05:49:54.340729 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerName="nova-scheduler-scheduler" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.340746 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerName="nova-scheduler-scheduler" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.340949 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" containerName="nova-scheduler-scheduler" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.341890 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.346622 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.347775 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.530142 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.530182 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.530242 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9vf9\" (UniqueName: \"kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.632515 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.632582 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.632692 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9vf9\" (UniqueName: \"kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.648387 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.660400 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9vf9\" (UniqueName: \"kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.661138 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") " pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.684289 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.906334 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"12ce0bf4-4fb7-44da-87d4-9592ef8848a1","Type":"ContainerStarted","Data":"101d778db9680e44c2a44042cc746103c5fee3fb19a3e2b90fd0016e4fa55f7f"} Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.906383 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"12ce0bf4-4fb7-44da-87d4-9592ef8848a1","Type":"ContainerStarted","Data":"b71ff6e4153753630486f4cf4dc92012b692837938d349150f56ac6a4d77f96b"} Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.906430 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 01 05:49:54 crc kubenswrapper[4661]: I1001 05:49:54.931784 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.931768736 podStartE2EDuration="2.931768736s" podCreationTimestamp="2025-10-01 05:49:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:54.924861477 +0000 UTC m=+1243.862840101" watchObservedRunningTime="2025-10-01 05:49:54.931768736 +0000 UTC m=+1243.869747350" Oct 01 05:49:55 crc kubenswrapper[4661]: I1001 05:49:55.174727 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:49:55 crc kubenswrapper[4661]: I1001 05:49:55.771343 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="232f0a78-781e-48d2-a8ca-aab89e6cb3c8" path="/var/lib/kubelet/pods/232f0a78-781e-48d2-a8ca-aab89e6cb3c8/volumes" Oct 01 05:49:55 crc kubenswrapper[4661]: I1001 05:49:55.927819 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2f02b25-17c7-4302-b82f-d9954e27a070","Type":"ContainerStarted","Data":"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278"} Oct 01 05:49:55 crc kubenswrapper[4661]: I1001 05:49:55.927919 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2f02b25-17c7-4302-b82f-d9954e27a070","Type":"ContainerStarted","Data":"a698b0611ee6615dfb8b6623ec75a8bc6e5cfa0f1fbc9a8d4ab981f1f4370869"} Oct 01 05:49:55 crc kubenswrapper[4661]: I1001 05:49:55.957704 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.957680565 podStartE2EDuration="1.957680565s" podCreationTimestamp="2025-10-01 05:49:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:49:55.946291676 +0000 UTC m=+1244.884270330" watchObservedRunningTime="2025-10-01 05:49:55.957680565 +0000 UTC m=+1244.895659209" Oct 01 05:49:59 crc kubenswrapper[4661]: I1001 05:49:59.685658 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 05:50:02 crc kubenswrapper[4661]: I1001 05:50:02.291756 4661 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:02 crc kubenswrapper[4661]: I1001 05:50:02.292292 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:03 crc kubenswrapper[4661]: I1001 05:50:03.353132 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 01 05:50:03 crc kubenswrapper[4661]: I1001 05:50:03.374804 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.211:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:03 crc kubenswrapper[4661]: I1001 05:50:03.374834 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.211:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:04 crc kubenswrapper[4661]: I1001 05:50:04.684892 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 05:50:04 crc kubenswrapper[4661]: I1001 05:50:04.717441 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 05:50:05 crc kubenswrapper[4661]: I1001 05:50:05.082686 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.120645 4661 generic.go:334] "Generic (PLEG): container finished" podID="8070a6cb-4bc1-4376-818a-6b99d638166d" containerID="be0b2e70ed01ce29b794479aff4bdebc48a7399c396df1948b7415a936598030" exitCode=137 Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.120704 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8070a6cb-4bc1-4376-818a-6b99d638166d","Type":"ContainerDied","Data":"be0b2e70ed01ce29b794479aff4bdebc48a7399c396df1948b7415a936598030"} Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.121245 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8070a6cb-4bc1-4376-818a-6b99d638166d","Type":"ContainerDied","Data":"578562b9818c82a954da299d679a7319ae8f51f7d854d4ca5a4b1405fcc936ee"} Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.121297 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="578562b9818c82a954da299d679a7319ae8f51f7d854d4ca5a4b1405fcc936ee" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.123895 4661 generic.go:334] "Generic (PLEG): container finished" podID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerID="fde785b55e762d033fa631fb6e4669eb7db71f7fbd0a9040850349258c6feadc" exitCode=137 Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.123924 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerDied","Data":"fde785b55e762d033fa631fb6e4669eb7db71f7fbd0a9040850349258c6feadc"} Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.123938 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"7b1c1df6-54c2-46e2-9ff9-51d6837e7850","Type":"ContainerDied","Data":"33873b0008bac5ac1bf34f34d781db512317e540d325f061dfbac9cd0e76206f"} Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.123946 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33873b0008bac5ac1bf34f34d781db512317e540d325f061dfbac9cd0e76206f" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.186860 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.204275 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.210471 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.296547 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.297295 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.301847 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.311802 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330062 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh782\" (UniqueName: \"kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782\") pod \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330252 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") pod \"8070a6cb-4bc1-4376-818a-6b99d638166d\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330293 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs\") pod \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330343 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle\") pod \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330371 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data\") pod \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\" (UID: \"7b1c1df6-54c2-46e2-9ff9-51d6837e7850\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330476 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt2fx\" (UniqueName: 
\"kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx\") pod \"8070a6cb-4bc1-4376-818a-6b99d638166d\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.330532 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle\") pod \"8070a6cb-4bc1-4376-818a-6b99d638166d\" (UID: \"8070a6cb-4bc1-4376-818a-6b99d638166d\") " Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.338053 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs" (OuterVolumeSpecName: "logs") pod "7b1c1df6-54c2-46e2-9ff9-51d6837e7850" (UID: "7b1c1df6-54c2-46e2-9ff9-51d6837e7850"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.339907 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.368449 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx" (OuterVolumeSpecName: "kube-api-access-lt2fx") pod "8070a6cb-4bc1-4376-818a-6b99d638166d" (UID: "8070a6cb-4bc1-4376-818a-6b99d638166d"). InnerVolumeSpecName "kube-api-access-lt2fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.368523 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782" (OuterVolumeSpecName: "kube-api-access-hh782") pod "7b1c1df6-54c2-46e2-9ff9-51d6837e7850" (UID: "7b1c1df6-54c2-46e2-9ff9-51d6837e7850"). InnerVolumeSpecName "kube-api-access-hh782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.390265 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data" (OuterVolumeSpecName: "config-data") pod "8070a6cb-4bc1-4376-818a-6b99d638166d" (UID: "8070a6cb-4bc1-4376-818a-6b99d638166d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.390767 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b1c1df6-54c2-46e2-9ff9-51d6837e7850" (UID: "7b1c1df6-54c2-46e2-9ff9-51d6837e7850"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.401480 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8070a6cb-4bc1-4376-818a-6b99d638166d" (UID: "8070a6cb-4bc1-4376-818a-6b99d638166d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.422276 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data" (OuterVolumeSpecName: "config-data") pod "7b1c1df6-54c2-46e2-9ff9-51d6837e7850" (UID: "7b1c1df6-54c2-46e2-9ff9-51d6837e7850"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441284 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt2fx\" (UniqueName: \"kubernetes.io/projected/8070a6cb-4bc1-4376-818a-6b99d638166d-kube-api-access-lt2fx\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441310 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441322 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh782\" (UniqueName: \"kubernetes.io/projected/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-kube-api-access-hh782\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441335 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8070a6cb-4bc1-4376-818a-6b99d638166d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441347 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:12 crc kubenswrapper[4661]: I1001 05:50:12.441357 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b1c1df6-54c2-46e2-9ff9-51d6837e7850-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.135776 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.135787 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.136087 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.143241 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.230075 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.248396 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.269712 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.297355 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.305366 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: E1001 05:50:13.305920 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8070a6cb-4bc1-4376-818a-6b99d638166d" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.305934 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="8070a6cb-4bc1-4376-818a-6b99d638166d" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 05:50:13 crc kubenswrapper[4661]: E1001 05:50:13.305944 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-metadata" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.305952 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-metadata" Oct 01 05:50:13 crc kubenswrapper[4661]: E1001 05:50:13.305960 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-log" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.305966 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-log" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.306157 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-log" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.306171 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" containerName="nova-metadata-metadata" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.306181 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="8070a6cb-4bc1-4376-818a-6b99d638166d" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.306881 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.312875 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.313115 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.316132 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.316304 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.318060 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.321415 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.325019 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.359691 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.374856 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.395342 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.397223 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.412695 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464587 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52k8f\" (UniqueName: \"kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464667 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464707 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464740 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464765 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464786 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464807 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464847 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5grk5\" (UniqueName: \"kubernetes.io/projected/81e96b52-f038-466f-92cd-07f4f8574bd5-kube-api-access-5grk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464865 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.464881 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566319 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9scp\" (UniqueName: \"kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566366 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566410 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566435 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566478 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566503 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.566717 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567169 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs\") pod 
\"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567251 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567319 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5grk5\" (UniqueName: \"kubernetes.io/projected/81e96b52-f038-466f-92cd-07f4f8574bd5-kube-api-access-5grk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567395 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567457 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567530 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567578 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567610 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52k8f\" (UniqueName: \"kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.567706 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " 
pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.571426 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.573151 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.573270 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.573484 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.574255 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.574348 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.575035 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81e96b52-f038-466f-92cd-07f4f8574bd5-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.588184 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52k8f\" (UniqueName: \"kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f\") pod \"nova-metadata-0\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") " pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.588192 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5grk5\" (UniqueName: \"kubernetes.io/projected/81e96b52-f038-466f-92cd-07f4f8574bd5-kube-api-access-5grk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"81e96b52-f038-466f-92cd-07f4f8574bd5\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.657098 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.666606 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669435 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9scp\" (UniqueName: \"kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669495 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669530 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669572 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669603 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.669643 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.670346 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.671231 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.672237 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc\") 
pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.672339 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.672444 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.688234 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9scp\" (UniqueName: \"kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp\") pod \"dnsmasq-dns-54599d8f7-s42qj\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.715136 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.769561 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b1c1df6-54c2-46e2-9ff9-51d6837e7850" path="/var/lib/kubelet/pods/7b1c1df6-54c2-46e2-9ff9-51d6837e7850/volumes" Oct 01 05:50:13 crc kubenswrapper[4661]: I1001 05:50:13.770393 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8070a6cb-4bc1-4376-818a-6b99d638166d" path="/var/lib/kubelet/pods/8070a6cb-4bc1-4376-818a-6b99d638166d/volumes" Oct 01 05:50:14 crc kubenswrapper[4661]: I1001 05:50:14.209791 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 05:50:14 crc kubenswrapper[4661]: W1001 05:50:14.219830 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34d469db_7404_45b7_8a09_2e0a516ab469.slice/crio-faad0d6b87881be2c0bc08a118bff3e001101e69fa3e859e12a1202eed6e88b9 WatchSource:0}: Error finding container faad0d6b87881be2c0bc08a118bff3e001101e69fa3e859e12a1202eed6e88b9: Status 404 returned error can't find the container with id faad0d6b87881be2c0bc08a118bff3e001101e69fa3e859e12a1202eed6e88b9 Oct 01 05:50:14 crc kubenswrapper[4661]: I1001 05:50:14.320657 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 05:50:14 crc kubenswrapper[4661]: W1001 05:50:14.342413 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81e96b52_f038_466f_92cd_07f4f8574bd5.slice/crio-fcad201c5efeb69b6ae86aac1e760b94cf489c16ca438ccc1ab5a0415c3495f6 WatchSource:0}: Error finding container fcad201c5efeb69b6ae86aac1e760b94cf489c16ca438ccc1ab5a0415c3495f6: Status 404 returned error can't find the container with id fcad201c5efeb69b6ae86aac1e760b94cf489c16ca438ccc1ab5a0415c3495f6 Oct 01 05:50:14 crc kubenswrapper[4661]: W1001 05:50:14.404440 4661 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd15989a0_c753_4158_a901_c018777e7560.slice/crio-4dfaa5c0e41e2b75f086ad8fea794ddaed124e8ae60544e1f0c86c9cc3efdbb6 WatchSource:0}: Error finding container 4dfaa5c0e41e2b75f086ad8fea794ddaed124e8ae60544e1f0c86c9cc3efdbb6: Status 404 returned error can't find the container with id 4dfaa5c0e41e2b75f086ad8fea794ddaed124e8ae60544e1f0c86c9cc3efdbb6 Oct 01 05:50:14 crc kubenswrapper[4661]: I1001 05:50:14.407874 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.155191 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerStarted","Data":"5fbbea816a44b7cabad1dd38708ad87338cf5facb2ce19cdf707cc2db9ef7f10"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.155463 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerStarted","Data":"01e4a4b449fb0a89ebcd62f37185cbed6cde1c8ae841433bdddf4340595ee61a"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.155477 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerStarted","Data":"faad0d6b87881be2c0bc08a118bff3e001101e69fa3e859e12a1202eed6e88b9"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.157495 4661 generic.go:334] "Generic (PLEG): container finished" podID="d15989a0-c753-4158-a901-c018777e7560" containerID="8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28" exitCode=0 Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.157550 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" event={"ID":"d15989a0-c753-4158-a901-c018777e7560","Type":"ContainerDied","Data":"8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.157599 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" event={"ID":"d15989a0-c753-4158-a901-c018777e7560","Type":"ContainerStarted","Data":"4dfaa5c0e41e2b75f086ad8fea794ddaed124e8ae60544e1f0c86c9cc3efdbb6"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.160097 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"81e96b52-f038-466f-92cd-07f4f8574bd5","Type":"ContainerStarted","Data":"fc65ebe015a24d02f3c3a8bb4bef130986e966d729ab26a940c2cf534df74a45"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.160133 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"81e96b52-f038-466f-92cd-07f4f8574bd5","Type":"ContainerStarted","Data":"fcad201c5efeb69b6ae86aac1e760b94cf489c16ca438ccc1ab5a0415c3495f6"} Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.185730 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.18570991 podStartE2EDuration="2.18570991s" podCreationTimestamp="2025-10-01 05:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:15.180126278 +0000 UTC m=+1264.118104892" watchObservedRunningTime="2025-10-01 05:50:15.18570991 +0000 UTC m=+1264.123688514" Oct 01 05:50:15 
crc kubenswrapper[4661]: I1001 05:50:15.261491 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.2614734309999998 podStartE2EDuration="2.261473431s" podCreationTimestamp="2025-10-01 05:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:15.225144023 +0000 UTC m=+1264.163122637" watchObservedRunningTime="2025-10-01 05:50:15.261473431 +0000 UTC m=+1264.199452035" Oct 01 05:50:15 crc kubenswrapper[4661]: I1001 05:50:15.737252 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.172891 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" event={"ID":"d15989a0-c753-4158-a901-c018777e7560","Type":"ContainerStarted","Data":"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f"} Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.173894 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-log" containerID="cri-o://3e9c0c7b85876020250a0825dbb4c583fbe1197339921e7d2e6dd3156ebe7149" gracePeriod=30 Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.174027 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.174102 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-api" containerID="cri-o://f644c4edec6f071f3da775605d3e9aa59a604852e5c060fb9ad74c1831d36452" gracePeriod=30 Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.214154 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" podStartSLOduration=3.214132988 podStartE2EDuration="3.214132988s" podCreationTimestamp="2025-10-01 05:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:16.193103375 +0000 UTC m=+1265.131082019" watchObservedRunningTime="2025-10-01 05:50:16.214132988 +0000 UTC m=+1265.152111602" Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.316441 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.316781 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-central-agent" containerID="cri-o://880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5" gracePeriod=30 Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.316916 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="proxy-httpd" containerID="cri-o://b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b" gracePeriod=30 Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.316962 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="sg-core" 
containerID="cri-o://946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318" gracePeriod=30 Oct 01 05:50:16 crc kubenswrapper[4661]: I1001 05:50:16.317023 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-notification-agent" containerID="cri-o://72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269" gracePeriod=30 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.186210 4661 generic.go:334] "Generic (PLEG): container finished" podID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerID="f644c4edec6f071f3da775605d3e9aa59a604852e5c060fb9ad74c1831d36452" exitCode=0 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.186456 4661 generic.go:334] "Generic (PLEG): container finished" podID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerID="3e9c0c7b85876020250a0825dbb4c583fbe1197339921e7d2e6dd3156ebe7149" exitCode=143 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.186294 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerDied","Data":"f644c4edec6f071f3da775605d3e9aa59a604852e5c060fb9ad74c1831d36452"} Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.186516 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerDied","Data":"3e9c0c7b85876020250a0825dbb4c583fbe1197339921e7d2e6dd3156ebe7149"} Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189150 4661 generic.go:334] "Generic (PLEG): container finished" podID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerID="b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b" exitCode=0 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189179 4661 generic.go:334] "Generic (PLEG): container finished" podID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerID="946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318" exitCode=2 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189192 4661 generic.go:334] "Generic (PLEG): container finished" podID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerID="880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5" exitCode=0 Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189465 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerDied","Data":"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b"} Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189487 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerDied","Data":"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318"} Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.189497 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerDied","Data":"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5"} Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.489737 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.644316 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data\") pod \"6ce9ee53-bca9-42ec-813c-b6c26e231231\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.644606 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle\") pod \"6ce9ee53-bca9-42ec-813c-b6c26e231231\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.644655 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs\") pod \"6ce9ee53-bca9-42ec-813c-b6c26e231231\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.644757 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9wj8\" (UniqueName: \"kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8\") pod \"6ce9ee53-bca9-42ec-813c-b6c26e231231\" (UID: \"6ce9ee53-bca9-42ec-813c-b6c26e231231\") " Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.650319 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs" (OuterVolumeSpecName: "logs") pod "6ce9ee53-bca9-42ec-813c-b6c26e231231" (UID: "6ce9ee53-bca9-42ec-813c-b6c26e231231"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.651836 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8" (OuterVolumeSpecName: "kube-api-access-c9wj8") pod "6ce9ee53-bca9-42ec-813c-b6c26e231231" (UID: "6ce9ee53-bca9-42ec-813c-b6c26e231231"). InnerVolumeSpecName "kube-api-access-c9wj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.716792 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data" (OuterVolumeSpecName: "config-data") pod "6ce9ee53-bca9-42ec-813c-b6c26e231231" (UID: "6ce9ee53-bca9-42ec-813c-b6c26e231231"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.738864 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ce9ee53-bca9-42ec-813c-b6c26e231231" (UID: "6ce9ee53-bca9-42ec-813c-b6c26e231231"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.748156 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.748182 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce9ee53-bca9-42ec-813c-b6c26e231231-logs\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.748191 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9wj8\" (UniqueName: \"kubernetes.io/projected/6ce9ee53-bca9-42ec-813c-b6c26e231231-kube-api-access-c9wj8\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:17 crc kubenswrapper[4661]: I1001 05:50:17.748200 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce9ee53-bca9-42ec-813c-b6c26e231231-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.201687 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce9ee53-bca9-42ec-813c-b6c26e231231","Type":"ContainerDied","Data":"398ae6c5bf984759eea7e9707daa4ee3fc338861039e22b559f4096b07c3d305"} Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.201736 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.201768 4661 scope.go:117] "RemoveContainer" containerID="f644c4edec6f071f3da775605d3e9aa59a604852e5c060fb9ad74c1831d36452" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.254907 4661 scope.go:117] "RemoveContainer" containerID="3e9c0c7b85876020250a0825dbb4c583fbe1197339921e7d2e6dd3156ebe7149" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.263691 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.280366 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.296599 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:18 crc kubenswrapper[4661]: E1001 05:50:18.297157 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-api" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.297182 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-api" Oct 01 05:50:18 crc kubenswrapper[4661]: E1001 05:50:18.297207 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-log" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.297216 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-log" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.297479 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" containerName="nova-api-api" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.297505 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" 
containerName="nova-api-log" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.298900 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.304213 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.304432 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.305138 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.316297 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.359312 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.359387 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.359490 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.360007 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.360337 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd9cb\" (UniqueName: \"kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.360548 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.461812 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462122 4661 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462159 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462268 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462302 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd9cb\" (UniqueName: \"kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.462784 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.467216 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.467256 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.469108 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.474852 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.486254 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd9cb\" (UniqueName: 
\"kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb\") pod \"nova-api-0\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") " pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.620086 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.657838 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.667049 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.667101 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.719255 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:18 crc kubenswrapper[4661]: I1001 05:50:18.719489 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="0e50e984-837a-48c4-ac76-c62066f13512" containerName="kube-state-metrics" containerID="cri-o://e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1" gracePeriod=30 Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.113945 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.183319 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.215720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerStarted","Data":"2278cccd80503138e73d4f5fd12d9220dae8b5ee8da97f10c830b45aca0ccf8b"} Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.218431 4661 generic.go:334] "Generic (PLEG): container finished" podID="0e50e984-837a-48c4-ac76-c62066f13512" containerID="e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1" exitCode=2 Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.218503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0e50e984-837a-48c4-ac76-c62066f13512","Type":"ContainerDied","Data":"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1"} Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.218527 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0e50e984-837a-48c4-ac76-c62066f13512","Type":"ContainerDied","Data":"ab5c7be3b9dddb38e1c079da2093817ec5a620fc34a366bcd421c096d913dde6"} Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.218543 4661 scope.go:117] "RemoveContainer" containerID="e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.218620 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.261271 4661 scope.go:117] "RemoveContainer" containerID="e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1" Oct 01 05:50:19 crc kubenswrapper[4661]: E1001 05:50:19.261755 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1\": container with ID starting with e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1 not found: ID does not exist" containerID="e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.261802 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1"} err="failed to get container status \"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1\": rpc error: code = NotFound desc = could not find container \"e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1\": container with ID starting with e8b2efc7025af3adb7492b64093b096b47499d209e4e7699b66b207aab066bd1 not found: ID does not exist" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.286687 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2gzz\" (UniqueName: \"kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz\") pod \"0e50e984-837a-48c4-ac76-c62066f13512\" (UID: \"0e50e984-837a-48c4-ac76-c62066f13512\") " Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.293590 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz" (OuterVolumeSpecName: "kube-api-access-c2gzz") pod "0e50e984-837a-48c4-ac76-c62066f13512" (UID: "0e50e984-837a-48c4-ac76-c62066f13512"). InnerVolumeSpecName "kube-api-access-c2gzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.388813 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2gzz\" (UniqueName: \"kubernetes.io/projected/0e50e984-837a-48c4-ac76-c62066f13512-kube-api-access-c2gzz\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.595058 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.615908 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.626991 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:19 crc kubenswrapper[4661]: E1001 05:50:19.627436 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e50e984-837a-48c4-ac76-c62066f13512" containerName="kube-state-metrics" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.627449 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e50e984-837a-48c4-ac76-c62066f13512" containerName="kube-state-metrics" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.627717 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e50e984-837a-48c4-ac76-c62066f13512" containerName="kube-state-metrics" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.628376 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.631411 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.631472 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.636101 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.694492 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.694530 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.694554 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.694597 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vnlz\" (UniqueName: 
\"kubernetes.io/projected/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-api-access-7vnlz\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.767158 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e50e984-837a-48c4-ac76-c62066f13512" path="/var/lib/kubelet/pods/0e50e984-837a-48c4-ac76-c62066f13512/volumes" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.768659 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ce9ee53-bca9-42ec-813c-b6c26e231231" path="/var/lib/kubelet/pods/6ce9ee53-bca9-42ec-813c-b6c26e231231/volumes" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.797157 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.797205 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.797238 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.797272 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vnlz\" (UniqueName: \"kubernetes.io/projected/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-api-access-7vnlz\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.802295 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.803873 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.806826 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.813999 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7vnlz\" (UniqueName: \"kubernetes.io/projected/ea68e214-b71a-4f5b-b2ee-9091d484023d-kube-api-access-7vnlz\") pod \"kube-state-metrics-0\" (UID: \"ea68e214-b71a-4f5b-b2ee-9091d484023d\") " pod="openstack/kube-state-metrics-0" Oct 01 05:50:19 crc kubenswrapper[4661]: I1001 05:50:19.982072 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 05:50:20 crc kubenswrapper[4661]: I1001 05:50:20.240278 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerStarted","Data":"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"} Oct 01 05:50:20 crc kubenswrapper[4661]: I1001 05:50:20.240313 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerStarted","Data":"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"} Oct 01 05:50:20 crc kubenswrapper[4661]: I1001 05:50:20.444599 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.444581538 podStartE2EDuration="2.444581538s" podCreationTimestamp="2025-10-01 05:50:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:20.262355828 +0000 UTC m=+1269.200334452" watchObservedRunningTime="2025-10-01 05:50:20.444581538 +0000 UTC m=+1269.382560152" Oct 01 05:50:20 crc kubenswrapper[4661]: I1001 05:50:20.445033 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 05:50:21 crc kubenswrapper[4661]: I1001 05:50:21.254923 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ea68e214-b71a-4f5b-b2ee-9091d484023d","Type":"ContainerStarted","Data":"bf7ff89b5e33cab46586af7ee68bc55e7a7590d20540b79aeef9d98a7719265c"} Oct 01 05:50:21 crc kubenswrapper[4661]: I1001 05:50:21.255338 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ea68e214-b71a-4f5b-b2ee-9091d484023d","Type":"ContainerStarted","Data":"df62a1fabed8b3b1ecf2e512fed413722c8a958dc0576a148cc7598648c3044e"} Oct 01 05:50:21 crc kubenswrapper[4661]: I1001 05:50:21.274376 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.859340145 podStartE2EDuration="2.274356669s" podCreationTimestamp="2025-10-01 05:50:19 +0000 UTC" firstStartedPulling="2025-10-01 05:50:20.453156441 +0000 UTC m=+1269.391135055" lastFinishedPulling="2025-10-01 05:50:20.868172955 +0000 UTC m=+1269.806151579" observedRunningTime="2025-10-01 05:50:21.270299049 +0000 UTC m=+1270.208277663" watchObservedRunningTime="2025-10-01 05:50:21.274356669 +0000 UTC m=+1270.212335323" Oct 01 05:50:22 crc kubenswrapper[4661]: I1001 05:50:22.267364 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.658064 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.667777 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.667801 4661 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.683387 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.716964 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.862223 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:50:23 crc kubenswrapper[4661]: I1001 05:50:23.862468 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="dnsmasq-dns" containerID="cri-o://319037cd7406a790025665fc0577d14227b3809f7fdca66bc36b95d28ac24dd3" gracePeriod=10 Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.333866 4661 generic.go:334] "Generic (PLEG): container finished" podID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerID="319037cd7406a790025665fc0577d14227b3809f7fdca66bc36b95d28ac24dd3" exitCode=0 Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.334703 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" event={"ID":"24db082c-81fd-427d-a0b0-57b64adc6f73","Type":"ContainerDied","Data":"319037cd7406a790025665fc0577d14227b3809f7fdca66bc36b95d28ac24dd3"} Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.334728 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" event={"ID":"24db082c-81fd-427d-a0b0-57b64adc6f73","Type":"ContainerDied","Data":"fb52f75915c4578a9472e87a4ac9f4bb6df56379ec98ea5554f0d00343e5340a"} Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.334737 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb52f75915c4578a9472e87a4ac9f4bb6df56379ec98ea5554f0d00343e5340a" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.359817 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.428886 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518237 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518367 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsvvr\" (UniqueName: \"kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518442 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518460 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-config\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518516 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.518566 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb\") pod \"24db082c-81fd-427d-a0b0-57b64adc6f73\" (UID: \"24db082c-81fd-427d-a0b0-57b64adc6f73\") " Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.538819 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr" (OuterVolumeSpecName: "kube-api-access-gsvvr") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "kube-api-access-gsvvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.581794 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-config" (OuterVolumeSpecName: "config") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.586899 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.588114 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.593791 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-mfflc"] Oct 01 05:50:24 crc kubenswrapper[4661]: E1001 05:50:24.594187 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="dnsmasq-dns" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.594205 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="dnsmasq-dns" Oct 01 05:50:24 crc kubenswrapper[4661]: E1001 05:50:24.594233 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="init" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.594239 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="init" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.594443 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" containerName="dnsmasq-dns" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.595144 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.600499 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.601059 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.605402 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-mfflc"] Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.620709 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsvvr\" (UniqueName: \"kubernetes.io/projected/24db082c-81fd-427d-a0b0-57b64adc6f73-kube-api-access-gsvvr\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.622941 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.622970 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.622984 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.623363 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.643210 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "24db082c-81fd-427d-a0b0-57b64adc6f73" (UID: "24db082c-81fd-427d-a0b0-57b64adc6f73"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.685761 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.215:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.685771 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.215:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724466 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724509 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724535 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724559 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h9fj\" (UniqueName: \"kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724608 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.724619 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/24db082c-81fd-427d-a0b0-57b64adc6f73-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.826384 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.826800 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.826854 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.826893 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h9fj\" (UniqueName: \"kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.831000 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.831252 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.831403 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.846672 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h9fj\" (UniqueName: \"kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj\") pod \"nova-cell1-cell-mapping-mfflc\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") " pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:24 crc kubenswrapper[4661]: I1001 05:50:24.957590 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mfflc" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.344306 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-n2bs5" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.387169 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.395496 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-n2bs5"] Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.479091 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-mfflc"] Oct 01 05:50:25 crc kubenswrapper[4661]: W1001 05:50:25.482212 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a9e6b8f_69ef_4dc3_87ae_132c47989184.slice/crio-e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b WatchSource:0}: Error finding container e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b: Status 404 returned error can't find the container with id e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.780923 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24db082c-81fd-427d-a0b0-57b64adc6f73" path="/var/lib/kubelet/pods/24db082c-81fd-427d-a0b0-57b64adc6f73/volumes" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.883067 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951481 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951673 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951722 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951748 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951779 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951818 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml\") pod 
\"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.951862 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k2bm\" (UniqueName: \"kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm\") pod \"fd19589d-f86d-440a-92a9-b6bf02e4989a\" (UID: \"fd19589d-f86d-440a-92a9-b6bf02e4989a\") " Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.952820 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.955277 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.957882 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm" (OuterVolumeSpecName: "kube-api-access-2k2bm") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "kube-api-access-2k2bm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.959873 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts" (OuterVolumeSpecName: "scripts") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:25 crc kubenswrapper[4661]: I1001 05:50:25.986912 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.048686 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053899 4661 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053927 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k2bm\" (UniqueName: \"kubernetes.io/projected/fd19589d-f86d-440a-92a9-b6bf02e4989a-kube-api-access-2k2bm\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053939 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053949 4661 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053957 4661 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd19589d-f86d-440a-92a9-b6bf02e4989a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.053965 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.061826 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data" (OuterVolumeSpecName: "config-data") pod "fd19589d-f86d-440a-92a9-b6bf02e4989a" (UID: "fd19589d-f86d-440a-92a9-b6bf02e4989a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.156120 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd19589d-f86d-440a-92a9-b6bf02e4989a-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.359939 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mfflc" event={"ID":"7a9e6b8f-69ef-4dc3-87ae-132c47989184","Type":"ContainerStarted","Data":"77fffd9e2758ac2d682816aff53c9b15f686aabe00f81898542207df802d3507"} Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.360002 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mfflc" event={"ID":"7a9e6b8f-69ef-4dc3-87ae-132c47989184","Type":"ContainerStarted","Data":"e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b"} Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.363281 4661 generic.go:334] "Generic (PLEG): container finished" podID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerID="72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269" exitCode=0 Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.363310 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerDied","Data":"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269"} Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.363329 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd19589d-f86d-440a-92a9-b6bf02e4989a","Type":"ContainerDied","Data":"e1096d6a659dd8c9c7e0f5f1e0abe589e5881a8f67a4b16cfd2a618843cc3a31"} Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.363348 4661 scope.go:117] "RemoveContainer" containerID="b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.363368 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.378844 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-mfflc" podStartSLOduration=2.378825306 podStartE2EDuration="2.378825306s" podCreationTimestamp="2025-10-01 05:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:26.376494722 +0000 UTC m=+1275.314473376" watchObservedRunningTime="2025-10-01 05:50:26.378825306 +0000 UTC m=+1275.316803930" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.388278 4661 scope.go:117] "RemoveContainer" containerID="946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.412349 4661 scope.go:117] "RemoveContainer" containerID="72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.412610 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.440320 4661 scope.go:117] "RemoveContainer" containerID="880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.444834 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.544559 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.545263 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-central-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545276 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-central-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.545289 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="sg-core" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545294 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="sg-core" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.545304 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="proxy-httpd" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545311 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="proxy-httpd" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.545327 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-notification-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545333 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-notification-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545521 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="proxy-httpd" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545532 4661 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-central-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545545 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="sg-core" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.545557 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" containerName="ceilometer-notification-agent" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.547161 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.556736 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.560746 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.561334 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.561497 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.667306 4661 scope.go:117] "RemoveContainer" containerID="b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.668375 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b\": container with ID starting with b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b not found: ID does not exist" containerID="b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.668410 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b"} err="failed to get container status \"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b\": rpc error: code = NotFound desc = could not find container \"b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b\": container with ID starting with b28e786d900403a5852a547e8857b046f109ea3b6bce3c41cab351e67b49912b not found: ID does not exist" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.668431 4661 scope.go:117] "RemoveContainer" containerID="946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.668746 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318\": container with ID starting with 946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318 not found: ID does not exist" containerID="946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.668782 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318"} err="failed to get container status \"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318\": rpc error: code = 
NotFound desc = could not find container \"946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318\": container with ID starting with 946fb21648cc87a3d94219ba4bbdce4d6a43014fb5734ce3808a64705986b318 not found: ID does not exist" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.670498 4661 scope.go:117] "RemoveContainer" containerID="72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.674166 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269\": container with ID starting with 72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269 not found: ID does not exist" containerID="72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.674223 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269"} err="failed to get container status \"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269\": rpc error: code = NotFound desc = could not find container \"72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269\": container with ID starting with 72e2ebd3f4ed64d1e0369371c926f4d0f8eedd59dadbc862a4a5535a7e40a269 not found: ID does not exist" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.674257 4661 scope.go:117] "RemoveContainer" containerID="880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5" Oct 01 05:50:26 crc kubenswrapper[4661]: E1001 05:50:26.677165 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5\": container with ID starting with 880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5 not found: ID does not exist" containerID="880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.677348 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5"} err="failed to get container status \"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5\": rpc error: code = NotFound desc = could not find container \"880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5\": container with ID starting with 880e3bdb36218da33344d7652c8ea6470a91fbd2fafe0a87e3aa5ee18c2507e5 not found: ID does not exist" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678479 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-run-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678553 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678593 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678663 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678681 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-config-data\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678702 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-scripts\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678724 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5gsk\" (UniqueName: \"kubernetes.io/projected/6dda16ec-71bd-4cca-b332-96772962b417-kube-api-access-w5gsk\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.678752 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-log-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.779809 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.779864 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.779933 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.779953 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-config-data\") pod \"ceilometer-0\" (UID: 
\"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.779995 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-scripts\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.780020 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5gsk\" (UniqueName: \"kubernetes.io/projected/6dda16ec-71bd-4cca-b332-96772962b417-kube-api-access-w5gsk\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.780047 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-log-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.780089 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-run-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.781136 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-run-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.781263 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dda16ec-71bd-4cca-b332-96772962b417-log-httpd\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.784505 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-config-data\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.785219 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.785258 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-scripts\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.785411 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 
05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.795729 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dda16ec-71bd-4cca-b332-96772962b417-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.796566 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5gsk\" (UniqueName: \"kubernetes.io/projected/6dda16ec-71bd-4cca-b332-96772962b417-kube-api-access-w5gsk\") pod \"ceilometer-0\" (UID: \"6dda16ec-71bd-4cca-b332-96772962b417\") " pod="openstack/ceilometer-0" Oct 01 05:50:26 crc kubenswrapper[4661]: I1001 05:50:26.945942 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 05:50:27 crc kubenswrapper[4661]: I1001 05:50:27.461732 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 05:50:27 crc kubenswrapper[4661]: W1001 05:50:27.469427 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dda16ec_71bd_4cca_b332_96772962b417.slice/crio-319b5593dfc6f0d0c248e14563a760b5b1e86b6cc7f518ebf5905196aa078823 WatchSource:0}: Error finding container 319b5593dfc6f0d0c248e14563a760b5b1e86b6cc7f518ebf5905196aa078823: Status 404 returned error can't find the container with id 319b5593dfc6f0d0c248e14563a760b5b1e86b6cc7f518ebf5905196aa078823 Oct 01 05:50:27 crc kubenswrapper[4661]: I1001 05:50:27.799279 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd19589d-f86d-440a-92a9-b6bf02e4989a" path="/var/lib/kubelet/pods/fd19589d-f86d-440a-92a9-b6bf02e4989a/volumes" Oct 01 05:50:28 crc kubenswrapper[4661]: I1001 05:50:28.391472 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6dda16ec-71bd-4cca-b332-96772962b417","Type":"ContainerStarted","Data":"698047d56c3dd6b60e065f5f1c8bb9417cdd4934b1f73fc072af7feb8223bace"} Oct 01 05:50:28 crc kubenswrapper[4661]: I1001 05:50:28.391535 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6dda16ec-71bd-4cca-b332-96772962b417","Type":"ContainerStarted","Data":"f6f04b73ea4a0febfe209a5d491650860f5a5592a80fbb98631493e5e2f9df08"} Oct 01 05:50:28 crc kubenswrapper[4661]: I1001 05:50:28.391554 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6dda16ec-71bd-4cca-b332-96772962b417","Type":"ContainerStarted","Data":"319b5593dfc6f0d0c248e14563a760b5b1e86b6cc7f518ebf5905196aa078823"} Oct 01 05:50:28 crc kubenswrapper[4661]: I1001 05:50:28.620999 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:28 crc kubenswrapper[4661]: I1001 05:50:28.621262 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:29 crc kubenswrapper[4661]: I1001 05:50:29.420898 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6dda16ec-71bd-4cca-b332-96772962b417","Type":"ContainerStarted","Data":"23b0aafd486b97806fe0ef85e86782ebc9623855723b367d20f166897b271f07"} Oct 01 05:50:29 crc kubenswrapper[4661]: I1001 05:50:29.638787 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" 
containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.217:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:29 crc kubenswrapper[4661]: I1001 05:50:29.638831 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.217:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:30 crc kubenswrapper[4661]: I1001 05:50:30.001099 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 01 05:50:31 crc kubenswrapper[4661]: I1001 05:50:31.460863 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6dda16ec-71bd-4cca-b332-96772962b417","Type":"ContainerStarted","Data":"e999ab753375abbd858d10c3be0f4a20881948bcb5cdf99849eba2083fc35d69"} Oct 01 05:50:31 crc kubenswrapper[4661]: I1001 05:50:31.462656 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 05:50:31 crc kubenswrapper[4661]: I1001 05:50:31.464753 4661 generic.go:334] "Generic (PLEG): container finished" podID="7a9e6b8f-69ef-4dc3-87ae-132c47989184" containerID="77fffd9e2758ac2d682816aff53c9b15f686aabe00f81898542207df802d3507" exitCode=0 Oct 01 05:50:31 crc kubenswrapper[4661]: I1001 05:50:31.464783 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mfflc" event={"ID":"7a9e6b8f-69ef-4dc3-87ae-132c47989184","Type":"ContainerDied","Data":"77fffd9e2758ac2d682816aff53c9b15f686aabe00f81898542207df802d3507"} Oct 01 05:50:31 crc kubenswrapper[4661]: I1001 05:50:31.491880 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6850537 podStartE2EDuration="5.491862495s" podCreationTimestamp="2025-10-01 05:50:26 +0000 UTC" firstStartedPulling="2025-10-01 05:50:27.472334676 +0000 UTC m=+1276.410313300" lastFinishedPulling="2025-10-01 05:50:30.279143481 +0000 UTC m=+1279.217122095" observedRunningTime="2025-10-01 05:50:31.483200989 +0000 UTC m=+1280.421179613" watchObservedRunningTime="2025-10-01 05:50:31.491862495 +0000 UTC m=+1280.429841109" Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.879347 4661 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.921306 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data\") pod \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") "
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.921491 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8h9fj\" (UniqueName: \"kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj\") pod \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") "
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.921560 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts\") pod \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") "
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.921748 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle\") pod \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\" (UID: \"7a9e6b8f-69ef-4dc3-87ae-132c47989184\") "
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.928939 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts" (OuterVolumeSpecName: "scripts") pod "7a9e6b8f-69ef-4dc3-87ae-132c47989184" (UID: "7a9e6b8f-69ef-4dc3-87ae-132c47989184"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.929736 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj" (OuterVolumeSpecName: "kube-api-access-8h9fj") pod "7a9e6b8f-69ef-4dc3-87ae-132c47989184" (UID: "7a9e6b8f-69ef-4dc3-87ae-132c47989184"). InnerVolumeSpecName "kube-api-access-8h9fj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.961475 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a9e6b8f-69ef-4dc3-87ae-132c47989184" (UID: "7a9e6b8f-69ef-4dc3-87ae-132c47989184"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:32 crc kubenswrapper[4661]: I1001 05:50:32.973876 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data" (OuterVolumeSpecName: "config-data") pod "7a9e6b8f-69ef-4dc3-87ae-132c47989184" (UID: "7a9e6b8f-69ef-4dc3-87ae-132c47989184"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.025404 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.025445 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8h9fj\" (UniqueName: \"kubernetes.io/projected/7a9e6b8f-69ef-4dc3-87ae-132c47989184-kube-api-access-8h9fj\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.025462 4661 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.025474 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a9e6b8f-69ef-4dc3-87ae-132c47989184-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.488355 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-mfflc"
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.489293 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-mfflc" event={"ID":"7a9e6b8f-69ef-4dc3-87ae-132c47989184","Type":"ContainerDied","Data":"e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b"}
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.489335 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0bd3aff5b25e664aed3a0380e3bcb535504ef2f891182aeebed4a3fe837dc2b"
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.675958 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.677285 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.700118 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.701835 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-log" containerID="cri-o://84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06" gracePeriod=30
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.702096 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-api" containerID="cri-o://c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4" gracePeriod=30
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.703908 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.767814 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.768360 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerName="nova-scheduler-scheduler" containerID="cri-o://118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" gracePeriod=30
Oct 01 05:50:33 crc kubenswrapper[4661]: I1001 05:50:33.797046 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:34 crc kubenswrapper[4661]: I1001 05:50:34.309116 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 05:50:34 crc kubenswrapper[4661]: I1001 05:50:34.309191 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 05:50:34 crc kubenswrapper[4661]: I1001 05:50:34.517191 4661 generic.go:334] "Generic (PLEG): container finished" podID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerID="84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06" exitCode=143
Oct 01 05:50:34 crc kubenswrapper[4661]: I1001 05:50:34.517309 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerDied","Data":"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"}
Oct 01 05:50:34 crc kubenswrapper[4661]: I1001 05:50:34.527767 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 01 05:50:34 crc kubenswrapper[4661]: E1001 05:50:34.688003 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 01 05:50:34 crc kubenswrapper[4661]: E1001 05:50:34.689440 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 01 05:50:34 crc kubenswrapper[4661]: E1001 05:50:34.693744 4661 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 01 05:50:34 crc kubenswrapper[4661]: E1001 05:50:34.693826 4661 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerName="nova-scheduler-scheduler"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.326038 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
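The three ExecSync errors above expose the scheduler's readiness command verbatim: /usr/bin/pgrep -r DRST nova-scheduler, which can no longer be registered once the container is stopping. A sketch of an exec readiness probe carrying that command; the timing fields are assumptions:

    package main

    import corev1 "k8s.io/api/core/v1"

    // The command is taken verbatim from the cmd=[...] field in the ExecSync
    // errors above; the numeric fields are assumptions for illustration.
    var novaSchedulerReadinessProbe = &corev1.Probe{
    	ProbeHandler: corev1.ProbeHandler{
    		Exec: &corev1.ExecAction{
    			Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
    		},
    	},
    	PeriodSeconds:  10, // assumed
    	TimeoutSeconds: 5,  // assumed
    }

Once CRI-O reports the container as stopping, an exec-based probe is expected to error out exactly as logged at prober.go:104 above, rather than return a clean failure.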
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.372826 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.372955 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd9cb\" (UniqueName: \"kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.373086 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.373229 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.373287 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.373346 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs\") pod \"96ecc383-d231-407b-8d53-31a3538d3ff7\" (UID: \"96ecc383-d231-407b-8d53-31a3538d3ff7\") "
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.377284 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs" (OuterVolumeSpecName: "logs") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.391318 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb" (OuterVolumeSpecName: "kube-api-access-kd9cb") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "kube-api-access-kd9cb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.413104 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data" (OuterVolumeSpecName: "config-data") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.430809 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.435551 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.453746 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "96ecc383-d231-407b-8d53-31a3538d3ff7" (UID: "96ecc383-d231-407b-8d53-31a3538d3ff7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.475826 4661 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.475879 4661 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.475899 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd9cb\" (UniqueName: \"kubernetes.io/projected/96ecc383-d231-407b-8d53-31a3538d3ff7-kube-api-access-kd9cb\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.475924 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.476920 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ecc383-d231-407b-8d53-31a3538d3ff7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.476949 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96ecc383-d231-407b-8d53-31a3538d3ff7-logs\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534237 4661 generic.go:334] "Generic (PLEG): container finished" podID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerID="c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4" exitCode=0
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534326 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534351 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerDied","Data":"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"}
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534796 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96ecc383-d231-407b-8d53-31a3538d3ff7","Type":"ContainerDied","Data":"2278cccd80503138e73d4f5fd12d9220dae8b5ee8da97f10c830b45aca0ccf8b"}
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534850 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-log" containerID="cri-o://01e4a4b449fb0a89ebcd62f37185cbed6cde1c8ae841433bdddf4340595ee61a" gracePeriod=30
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.534985 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-metadata" containerID="cri-o://5fbbea816a44b7cabad1dd38708ad87338cf5facb2ce19cdf707cc2db9ef7f10" gracePeriod=30
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.535182 4661 scope.go:117] "RemoveContainer" containerID="c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.577545 4661 scope.go:117] "RemoveContainer" containerID="84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.597481 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.602942 4661 scope.go:117] "RemoveContainer" containerID="c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"
Oct 01 05:50:35 crc kubenswrapper[4661]: E1001 05:50:35.604248 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4\": container with ID starting with c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4 not found: ID does not exist" containerID="c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.604312 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4"} err="failed to get container status \"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4\": rpc error: code = NotFound desc = could not find container \"c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4\": container with ID starting with c350845e0f7f000c2901675e4f516c9c1c9f30a0e92ba124ec1b7b37182972b4 not found: ID does not exist"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.604353 4661 scope.go:117] "RemoveContainer" containerID="84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"
Oct 01 05:50:35 crc kubenswrapper[4661]: E1001 05:50:35.605076 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06\": container with ID starting with 84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06 not found: ID does not exist" containerID="84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.605115 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06"} err="failed to get container status \"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06\": rpc error: code = NotFound desc = could not find container \"84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06\": container with ID starting with 84d3214d79b05dbbd6c686dbe08481d0e17f9bacfd705fa5e5bf5ce4cde86e06 not found: ID does not exist"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.610726 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.622409 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:35 crc kubenswrapper[4661]: E1001 05:50:35.622891 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-api"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.622909 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-api"
Oct 01 05:50:35 crc kubenswrapper[4661]: E1001 05:50:35.622918 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a9e6b8f-69ef-4dc3-87ae-132c47989184" containerName="nova-manage"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.622924 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a9e6b8f-69ef-4dc3-87ae-132c47989184" containerName="nova-manage"
Oct 01 05:50:35 crc kubenswrapper[4661]: E1001 05:50:35.622939 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-log"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.622946 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-log"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.623143 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-api"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.623158 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a9e6b8f-69ef-4dc3-87ae-132c47989184" containerName="nova-manage"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.623174 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" containerName="nova-api-log"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.624240 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.626238 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.626659 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.631475 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.648362 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681083 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-public-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681164 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681325 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sfgn\" (UniqueName: \"kubernetes.io/projected/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-kube-api-access-9sfgn\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681437 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-config-data\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.681539 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-logs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.774397 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96ecc383-d231-407b-8d53-31a3538d3ff7" path="/var/lib/kubelet/pods/96ecc383-d231-407b-8d53-31a3538d3ff7/volumes"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783068 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-public-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
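The reflector entries above name the Secrets backing the replacement nova-api-0 pod (nova-api-config-data, cert-nova-public-svc, cert-nova-internal-svc), and the volume events show secret, projected, and empty-dir sources. A sketch of volume definitions consistent with those events; the pairing of volume names to Secret names is an assumption inferred from the names appearing together here:

    package main

    import corev1 "k8s.io/api/core/v1"

    // Secret-backed and emptyDir volumes matching the mount events for
    // nova-api-0. The kube-api-access-9sfgn volume in the log is the
    // kubelet-injected projected service-account token, not declared by hand;
    // the combined-ca-bundle secret volume is omitted because its Secret
    // name is not shown in this log.
    var novaAPIVolumes = []corev1.Volume{
    	{Name: "config-data", VolumeSource: corev1.VolumeSource{
    		Secret: &corev1.SecretVolumeSource{SecretName: "nova-api-config-data"}}},
    	{Name: "public-tls-certs", VolumeSource: corev1.VolumeSource{
    		Secret: &corev1.SecretVolumeSource{SecretName: "cert-nova-public-svc"}}},
    	{Name: "internal-tls-certs", VolumeSource: corev1.VolumeSource{
    		Secret: &corev1.SecretVolumeSource{SecretName: "cert-nova-internal-svc"}}},
    	{Name: "logs", VolumeSource: corev1.VolumeSource{
    		EmptyDir: &corev1.EmptyDirVolumeSource{}}},
    }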
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783124 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783171 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sfgn\" (UniqueName: \"kubernetes.io/projected/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-kube-api-access-9sfgn\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783271 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-config-data\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783307 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783340 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-logs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.783939 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-logs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.788971 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.789838 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-public-tls-certs\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.790719 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-config-data\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.791445 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.801128 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sfgn\" (UniqueName: \"kubernetes.io/projected/09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636-kube-api-access-9sfgn\") pod \"nova-api-0\" (UID: \"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636\") " pod="openstack/nova-api-0"
Oct 01 05:50:35 crc kubenswrapper[4661]: I1001 05:50:35.947263 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.513885 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.558011 4661 generic.go:334] "Generic (PLEG): container finished" podID="34d469db-7404-45b7-8a09-2e0a516ab469" containerID="5fbbea816a44b7cabad1dd38708ad87338cf5facb2ce19cdf707cc2db9ef7f10" exitCode=0
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.558426 4661 generic.go:334] "Generic (PLEG): container finished" podID="34d469db-7404-45b7-8a09-2e0a516ab469" containerID="01e4a4b449fb0a89ebcd62f37185cbed6cde1c8ae841433bdddf4340595ee61a" exitCode=143
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.558491 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerDied","Data":"5fbbea816a44b7cabad1dd38708ad87338cf5facb2ce19cdf707cc2db9ef7f10"}
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.558530 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerDied","Data":"01e4a4b449fb0a89ebcd62f37185cbed6cde1c8ae841433bdddf4340595ee61a"}
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.562277 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636","Type":"ContainerStarted","Data":"83d1a8898660e25628bda0a50afda873c05bb73d3629f09404184ca8db5b453b"}
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.829150 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.908018 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data\") pod \"34d469db-7404-45b7-8a09-2e0a516ab469\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") "
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.908454 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs\") pod \"34d469db-7404-45b7-8a09-2e0a516ab469\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") "
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.908655 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle\") pod \"34d469db-7404-45b7-8a09-2e0a516ab469\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") "
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.908796 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs\") pod \"34d469db-7404-45b7-8a09-2e0a516ab469\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") "
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.908833 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52k8f\" (UniqueName: \"kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f\") pod \"34d469db-7404-45b7-8a09-2e0a516ab469\" (UID: \"34d469db-7404-45b7-8a09-2e0a516ab469\") "
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.909555 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs" (OuterVolumeSpecName: "logs") pod "34d469db-7404-45b7-8a09-2e0a516ab469" (UID: "34d469db-7404-45b7-8a09-2e0a516ab469"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.917488 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f" (OuterVolumeSpecName: "kube-api-access-52k8f") pod "34d469db-7404-45b7-8a09-2e0a516ab469" (UID: "34d469db-7404-45b7-8a09-2e0a516ab469"). InnerVolumeSpecName "kube-api-access-52k8f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.939856 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data" (OuterVolumeSpecName: "config-data") pod "34d469db-7404-45b7-8a09-2e0a516ab469" (UID: "34d469db-7404-45b7-8a09-2e0a516ab469"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.956549 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34d469db-7404-45b7-8a09-2e0a516ab469" (UID: "34d469db-7404-45b7-8a09-2e0a516ab469"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:36 crc kubenswrapper[4661]: I1001 05:50:36.992034 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "34d469db-7404-45b7-8a09-2e0a516ab469" (UID: "34d469db-7404-45b7-8a09-2e0a516ab469"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.013744 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.013841 4661 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.013859 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52k8f\" (UniqueName: \"kubernetes.io/projected/34d469db-7404-45b7-8a09-2e0a516ab469-kube-api-access-52k8f\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.013871 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d469db-7404-45b7-8a09-2e0a516ab469-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.013883 4661 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d469db-7404-45b7-8a09-2e0a516ab469-logs\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.576909 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636","Type":"ContainerStarted","Data":"d49ff039701a972163ea0efae5368421a3d477080d9a5e76474ca479cb01cd6c"}
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.576986 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636","Type":"ContainerStarted","Data":"dfba196a83f8a3dcf3cd1e56bae08e33cb60b53db5ec667c88c7a89234536e40"}
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.579306 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d469db-7404-45b7-8a09-2e0a516ab469","Type":"ContainerDied","Data":"faad0d6b87881be2c0bc08a118bff3e001101e69fa3e859e12a1202eed6e88b9"}
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.579414 4661 scope.go:117] "RemoveContainer" containerID="5fbbea816a44b7cabad1dd38708ad87338cf5facb2ce19cdf707cc2db9ef7f10"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.579440 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.606442 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.606419752 podStartE2EDuration="2.606419752s" podCreationTimestamp="2025-10-01 05:50:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:37.601235631 +0000 UTC m=+1286.539214245" watchObservedRunningTime="2025-10-01 05:50:37.606419752 +0000 UTC m=+1286.544398376"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.646514 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.652822 4661 scope.go:117] "RemoveContainer" containerID="01e4a4b449fb0a89ebcd62f37185cbed6cde1c8ae841433bdddf4340595ee61a"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.671133 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.682550 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:37 crc kubenswrapper[4661]: E1001 05:50:37.683082 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-metadata"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.683102 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-metadata"
Oct 01 05:50:37 crc kubenswrapper[4661]: E1001 05:50:37.683131 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-log"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.683140 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-log"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.683381 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-log"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.683402 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" containerName="nova-metadata-metadata"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.684865 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
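For the replacement nova-api-0, the tracker above reports podStartSLOduration=2.606419752 with zero-valued pull timestamps, which matches watchObservedRunningTime minus podCreationTimestamp. A minimal recomputation of that subtraction, using the timestamps copied from the log (error handling elided for brevity):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Layout matching Go's default time.Time formatting used in the log.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, _ := time.Parse(layout, "2025-10-01 05:50:35 +0000 UTC")            // podCreationTimestamp
    	observed, _ := time.Parse(layout, "2025-10-01 05:50:37.606419752 +0000 UTC") // watchObservedRunningTime
    	fmt.Println(observed.Sub(created)) // 2.606419752s; with no image pull, SLO equals E2E duration
    }

By contrast, the earlier ceilometer-0 entry shows an SLO duration shorter than its E2E duration because the image-pull window (firstStartedPulling to lastFinishedPulling) is excluded.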
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.692283 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.692418 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.692576 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.726197 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.726277 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-config-data\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.726498 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/299202ce-4dac-4387-8684-b94ca8f9f1b3-logs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.726569 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.726744 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj2cq\" (UniqueName: \"kubernetes.io/projected/299202ce-4dac-4387-8684-b94ca8f9f1b3-kube-api-access-tj2cq\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.767644 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34d469db-7404-45b7-8a09-2e0a516ab469" path="/var/lib/kubelet/pods/34d469db-7404-45b7-8a09-2e0a516ab469/volumes"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.828249 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj2cq\" (UniqueName: \"kubernetes.io/projected/299202ce-4dac-4387-8684-b94ca8f9f1b3-kube-api-access-tj2cq\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.828345 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.828395 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-config-data\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.828444 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/299202ce-4dac-4387-8684-b94ca8f9f1b3-logs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.828469 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.829508 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/299202ce-4dac-4387-8684-b94ca8f9f1b3-logs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.836346 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.836473 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-config-data\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.836940 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/299202ce-4dac-4387-8684-b94ca8f9f1b3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:37 crc kubenswrapper[4661]: I1001 05:50:37.856243 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj2cq\" (UniqueName: \"kubernetes.io/projected/299202ce-4dac-4387-8684-b94ca8f9f1b3-kube-api-access-tj2cq\") pod \"nova-metadata-0\" (UID: \"299202ce-4dac-4387-8684-b94ca8f9f1b3\") " pod="openstack/nova-metadata-0"
Oct 01 05:50:38 crc kubenswrapper[4661]: I1001 05:50:38.009803 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 01 05:50:38 crc kubenswrapper[4661]: I1001 05:50:38.580725 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 01 05:50:38 crc kubenswrapper[4661]: W1001 05:50:38.585951 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod299202ce_4dac_4387_8684_b94ca8f9f1b3.slice/crio-bf26e477cf6f7cce30c6346a91d0d2a48804af99695beb363b7faf1ea4cb565c WatchSource:0}: Error finding container bf26e477cf6f7cce30c6346a91d0d2a48804af99695beb363b7faf1ea4cb565c: Status 404 returned error can't find the container with id bf26e477cf6f7cce30c6346a91d0d2a48804af99695beb363b7faf1ea4cb565c
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.192991 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.257712 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9vf9\" (UniqueName: \"kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9\") pod \"c2f02b25-17c7-4302-b82f-d9954e27a070\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") "
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.258050 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data\") pod \"c2f02b25-17c7-4302-b82f-d9954e27a070\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") "
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.258072 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle\") pod \"c2f02b25-17c7-4302-b82f-d9954e27a070\" (UID: \"c2f02b25-17c7-4302-b82f-d9954e27a070\") "
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.269044 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9" (OuterVolumeSpecName: "kube-api-access-z9vf9") pod "c2f02b25-17c7-4302-b82f-d9954e27a070" (UID: "c2f02b25-17c7-4302-b82f-d9954e27a070"). InnerVolumeSpecName "kube-api-access-z9vf9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.329533 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data" (OuterVolumeSpecName: "config-data") pod "c2f02b25-17c7-4302-b82f-d9954e27a070" (UID: "c2f02b25-17c7-4302-b82f-d9954e27a070"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.349868 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2f02b25-17c7-4302-b82f-d9954e27a070" (UID: "c2f02b25-17c7-4302-b82f-d9954e27a070"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.360495 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9vf9\" (UniqueName: \"kubernetes.io/projected/c2f02b25-17c7-4302-b82f-d9954e27a070-kube-api-access-z9vf9\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.360544 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.360566 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2f02b25-17c7-4302-b82f-d9954e27a070-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.609537 4661 generic.go:334] "Generic (PLEG): container finished" podID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" exitCode=0
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.609579 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.609627 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2f02b25-17c7-4302-b82f-d9954e27a070","Type":"ContainerDied","Data":"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278"}
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.609707 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2f02b25-17c7-4302-b82f-d9954e27a070","Type":"ContainerDied","Data":"a698b0611ee6615dfb8b6623ec75a8bc6e5cfa0f1fbc9a8d4ab981f1f4370869"}
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.609733 4661 scope.go:117] "RemoveContainer" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.612255 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"299202ce-4dac-4387-8684-b94ca8f9f1b3","Type":"ContainerStarted","Data":"040d0f049b24837e9e640a7f7c1ab306b40ba8302d85113bf8dbb53703698dd8"}
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.612289 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"299202ce-4dac-4387-8684-b94ca8f9f1b3","Type":"ContainerStarted","Data":"fa44cd0591a0ee4128cd7018144ef746203c7bec92b7519805c489ae81a0bf21"}
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.612303 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"299202ce-4dac-4387-8684-b94ca8f9f1b3","Type":"ContainerStarted","Data":"bf26e477cf6f7cce30c6346a91d0d2a48804af99695beb363b7faf1ea4cb565c"}
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.637043 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.637020764 podStartE2EDuration="2.637020764s" podCreationTimestamp="2025-10-01 05:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:39.633151049 +0000 UTC m=+1288.571129673" watchObservedRunningTime="2025-10-01 05:50:39.637020764 +0000 UTC m=+1288.574999388"
05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.642858 4661 scope.go:117] "RemoveContainer" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" Oct 01 05:50:39 crc kubenswrapper[4661]: E1001 05:50:39.644344 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278\": container with ID starting with 118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278 not found: ID does not exist" containerID="118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.644391 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278"} err="failed to get container status \"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278\": rpc error: code = NotFound desc = could not find container \"118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278\": container with ID starting with 118fe2ea5651e45b30689beb0017865229679d67fab488edcc7b8a2a667a2278 not found: ID does not exist" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.660538 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.676398 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.687680 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:50:39 crc kubenswrapper[4661]: E1001 05:50:39.688175 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerName="nova-scheduler-scheduler" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.688196 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerName="nova-scheduler-scheduler" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.688465 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" containerName="nova-scheduler-scheduler" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.689341 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.697448 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.724622 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.767994 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r5rj\" (UniqueName: \"kubernetes.io/projected/c1001022-d4fa-47f2-804f-480807988029-kube-api-access-2r5rj\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.768094 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-config-data\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.768121 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.769666 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2f02b25-17c7-4302-b82f-d9954e27a070" path="/var/lib/kubelet/pods/c2f02b25-17c7-4302-b82f-d9954e27a070/volumes"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.869266 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-config-data\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.869323 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.869459 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r5rj\" (UniqueName: \"kubernetes.io/projected/c1001022-d4fa-47f2-804f-480807988029-kube-api-access-2r5rj\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.875432 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-config-data\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.875435 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1001022-d4fa-47f2-804f-480807988029-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0"
\"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0" Oct 01 05:50:39 crc kubenswrapper[4661]: I1001 05:50:39.891003 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r5rj\" (UniqueName: \"kubernetes.io/projected/c1001022-d4fa-47f2-804f-480807988029-kube-api-access-2r5rj\") pod \"nova-scheduler-0\" (UID: \"c1001022-d4fa-47f2-804f-480807988029\") " pod="openstack/nova-scheduler-0" Oct 01 05:50:40 crc kubenswrapper[4661]: I1001 05:50:40.046536 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 05:50:40 crc kubenswrapper[4661]: I1001 05:50:40.399754 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 05:50:40 crc kubenswrapper[4661]: I1001 05:50:40.625926 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c1001022-d4fa-47f2-804f-480807988029","Type":"ContainerStarted","Data":"85aa8f95b6f6e4df47ef676af6c18b3efbf13e5f74e63522a57294911830c683"} Oct 01 05:50:41 crc kubenswrapper[4661]: I1001 05:50:41.641985 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c1001022-d4fa-47f2-804f-480807988029","Type":"ContainerStarted","Data":"9cef4e85a1e7de24bb933bbdbdec8d372c8371120eba2fdb8e1e00f40f6ffbd7"} Oct 01 05:50:41 crc kubenswrapper[4661]: I1001 05:50:41.685856 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.685829521 podStartE2EDuration="2.685829521s" podCreationTimestamp="2025-10-01 05:50:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:50:41.665839017 +0000 UTC m=+1290.603817671" watchObservedRunningTime="2025-10-01 05:50:41.685829521 +0000 UTC m=+1290.623808165" Oct 01 05:50:43 crc kubenswrapper[4661]: I1001 05:50:43.011687 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:50:43 crc kubenswrapper[4661]: I1001 05:50:43.012070 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 05:50:45 crc kubenswrapper[4661]: I1001 05:50:45.046896 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 05:50:45 crc kubenswrapper[4661]: I1001 05:50:45.947844 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:45 crc kubenswrapper[4661]: I1001 05:50:45.947923 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 05:50:46 crc kubenswrapper[4661]: I1001 05:50:46.965915 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.221:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:46 crc kubenswrapper[4661]: I1001 05:50:46.966698 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.221:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 05:50:48 crc kubenswrapper[4661]: I1001 05:50:48.011794 4661 
Oct 01 05:50:48 crc kubenswrapper[4661]: I1001 05:50:48.011866 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 01 05:50:49 crc kubenswrapper[4661]: I1001 05:50:49.031845 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="299202ce-4dac-4387-8684-b94ca8f9f1b3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 01 05:50:49 crc kubenswrapper[4661]: I1001 05:50:49.031863 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="299202ce-4dac-4387-8684-b94ca8f9f1b3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 01 05:50:50 crc kubenswrapper[4661]: I1001 05:50:50.046731 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 01 05:50:50 crc kubenswrapper[4661]: I1001 05:50:50.101874 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 01 05:50:50 crc kubenswrapper[4661]: I1001 05:50:50.820874 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 01 05:50:55 crc kubenswrapper[4661]: I1001 05:50:55.959855 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 01 05:50:55 crc kubenswrapper[4661]: I1001 05:50:55.961360 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 01 05:50:55 crc kubenswrapper[4661]: I1001 05:50:55.964258 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 01 05:50:55 crc kubenswrapper[4661]: I1001 05:50:55.982950 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 01 05:50:56 crc kubenswrapper[4661]: I1001 05:50:56.865615 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 01 05:50:56 crc kubenswrapper[4661]: I1001 05:50:56.876065 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 01 05:50:56 crc kubenswrapper[4661]: I1001 05:50:56.957659 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 01 05:50:58 crc kubenswrapper[4661]: I1001 05:50:58.016816 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 01 05:50:58 crc kubenswrapper[4661]: I1001 05:50:58.026060 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 01 05:50:58 crc kubenswrapper[4661]: I1001 05:50:58.030010 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 01 05:50:58 crc kubenswrapper[4661]: I1001 05:50:58.903027 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 01 05:51:04 crc kubenswrapper[4661]: I1001 05:51:04.309428 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
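[Annotation] The Startup probe failures above are plain HTTPS GETs that exceeded the probe's timeoutSeconds before any headers arrived. A rough stand-in for what the prober does (the 1 s timeout and the unverified TLS context are assumptions; the real values live in the pod spec, not in this log):

    import ssl, urllib.request

    def http_probe(url="https://10.217.0.221:8774/", timeout=1.0):
        ctx = ssl._create_unverified_context()  # kubelet HTTPS probes do not verify certificates
        try:
            with urllib.request.urlopen(url, timeout=timeout, context=ctx) as resp:
                return 200 <= resp.status < 400  # 2xx/3xx counts as probe success
        except Exception:
            return False  # timeouts surface like the "Client.Timeout exceeded" output above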
Oct 01 05:51:04 crc kubenswrapper[4661]: I1001 05:51:04.310371 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 05:51:07 crc kubenswrapper[4661]: I1001 05:51:07.079127 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 01 05:51:08 crc kubenswrapper[4661]: I1001 05:51:08.067742 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 01 05:51:10 crc kubenswrapper[4661]: I1001 05:51:10.496601 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" containerID="cri-o://cee8f1a11352270182d3bca4ac8feb700d7870c9f94e70f1687b9bafc2489739" gracePeriod=604797
Oct 01 05:51:11 crc kubenswrapper[4661]: I1001 05:51:11.250336 4661 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: connect: connection refused"
Oct 01 05:51:11 crc kubenswrapper[4661]: I1001 05:51:11.496721 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="rabbitmq" containerID="cri-o://2ada95fe4bda404b5dd58857619ad25cdc3c3801d25f51ee4e55f0ba0fc2deea" gracePeriod=604797
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.030253 4661 generic.go:334] "Generic (PLEG): container finished" podID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerID="cee8f1a11352270182d3bca4ac8feb700d7870c9f94e70f1687b9bafc2489739" exitCode=0
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.030571 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerDied","Data":"cee8f1a11352270182d3bca4ac8feb700d7870c9f94e70f1687b9bafc2489739"}
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.110467 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
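[Annotation] gracePeriod=604797 on both kill entries is the grace remaining when the kill started. Assuming the pods carry the 7-day terminationGracePeriodSeconds commonly set on RabbitMQ cluster pods (an assumption; the value is not in this log), the arithmetic is consistent with the ~3 s between the API DELETE (05:51:07) and the kill (05:51:10):

    termination_grace = 604800   # 7 days; assumed terminationGracePeriodSeconds
    elapsed = 3                  # approx. seconds between SyncLoop DELETE and the kill entry
    print(termination_grace - elapsed)  # -> 604797, matching gracePeriod= above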
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167546 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167673 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167758 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167783 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfxbs\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167835 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167884 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167910 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.167987 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.168013 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.168070 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") "
\"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.168169 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info\") pod \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\" (UID: \"7b2acad5-a746-42a5-b9e8-a9904ad242bc\") " Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.168685 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.174391 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.177802 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info" (OuterVolumeSpecName: "pod-info") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.179811 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs" (OuterVolumeSpecName: "kube-api-access-pfxbs") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "kube-api-access-pfxbs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.181375 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.181407 4661 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b2acad5-a746-42a5-b9e8-a9904ad242bc-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.181421 4661 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.181434 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfxbs\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-kube-api-access-pfxbs\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.181397 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.182436 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.183903 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.209792 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.229719 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data" (OuterVolumeSpecName: "config-data") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.272382 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf" (OuterVolumeSpecName: "server-conf") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286470 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286503 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286513 4661 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b2acad5-a746-42a5-b9e8-a9904ad242bc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286524 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286533 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.286541 4661 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b2acad5-a746-42a5-b9e8-a9904ad242bc-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.310214 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.348809 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7b2acad5-a746-42a5-b9e8-a9904ad242bc" (UID: "7b2acad5-a746-42a5-b9e8-a9904ad242bc"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.387955 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:12 crc kubenswrapper[4661]: I1001 05:51:12.387992 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b2acad5-a746-42a5-b9e8-a9904ad242bc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.045605 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7b2acad5-a746-42a5-b9e8-a9904ad242bc","Type":"ContainerDied","Data":"a2ccef982f76a0faf7b5093c5021207aab6c5251d62c92673096569f62dcaadf"} Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.045704 4661 scope.go:117] "RemoveContainer" containerID="cee8f1a11352270182d3bca4ac8feb700d7870c9f94e70f1687b9bafc2489739" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.046689 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.048741 4661 generic.go:334] "Generic (PLEG): container finished" podID="31336b4a-1953-44ab-b229-401a3a3ac031" containerID="2ada95fe4bda404b5dd58857619ad25cdc3c3801d25f51ee4e55f0ba0fc2deea" exitCode=0 Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.048783 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerDied","Data":"2ada95fe4bda404b5dd58857619ad25cdc3c3801d25f51ee4e55f0ba0fc2deea"} Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.048807 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31336b4a-1953-44ab-b229-401a3a3ac031","Type":"ContainerDied","Data":"6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542"} Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.048817 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a0277fb2b76af082657120ed26d867ca9fd948146d4466fd81f71ace3603542" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.049484 4661 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.083980 4661 scope.go:117] "RemoveContainer" containerID="039dfc87fb55da52d83c66c05a25ea5859a3d7bdb5cf40fe94cb117e3a2ca1d2"
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111590 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111736 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111767 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111797 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111836 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111934 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111967 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.111999 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.112019 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xmk5\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") "
Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.117244 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.117801 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.118857 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.120409 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.123293 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info\") pod \"31336b4a-1953-44ab-b229-401a3a3ac031\" (UID: \"31336b4a-1953-44ab-b229-401a3a3ac031\") " Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.124115 4661 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.124180 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.127304 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.129355 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info" (OuterVolumeSpecName: "pod-info") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.131288 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5" (OuterVolumeSpecName: "kube-api-access-8xmk5") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "kube-api-access-8xmk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.131489 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.134125 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.148388 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.155833 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.162894 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:51:13 crc kubenswrapper[4661]: E1001 05:51:13.163438 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="setup-container" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.163455 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="setup-container" Oct 01 05:51:13 crc kubenswrapper[4661]: E1001 05:51:13.163480 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.163487 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: E1001 05:51:13.163521 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.163528 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: E1001 05:51:13.163545 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="setup-container" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.163561 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="setup-container" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.164212 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.164235 4661 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="31336b4a-1953-44ab-b229-401a3a3ac031" containerName="rabbitmq" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.166300 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.171682 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.176051 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.179065 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.179381 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.179574 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.179797 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.179970 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-x9srl" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.180723 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237519 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237547 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xmk5\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-kube-api-access-8xmk5\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237559 4661 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31336b4a-1953-44ab-b229-401a3a3ac031-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237568 4661 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31336b4a-1953-44ab-b229-401a3a3ac031-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237576 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.237596 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.287828 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.294429 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data" (OuterVolumeSpecName: "config-data") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.304969 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf" (OuterVolumeSpecName: "server-conf") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344477 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344540 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344561 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9a455c2-0405-4416-a367-c34353ee3fa3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344601 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gwrt\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-kube-api-access-8gwrt\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344643 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344669 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344686 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344701 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344724 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344742 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344793 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9a455c2-0405-4416-a367-c34353ee3fa3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344838 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344847 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.344856 4661 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31336b4a-1953-44ab-b229-401a3a3ac031-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.377196 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "31336b4a-1953-44ab-b229-401a3a3ac031" (UID: "31336b4a-1953-44ab-b229-401a3a3ac031"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446272 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446317 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446372 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9a455c2-0405-4416-a367-c34353ee3fa3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446415 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446443 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446463 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9a455c2-0405-4416-a367-c34353ee3fa3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446500 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gwrt\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-kube-api-access-8gwrt\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446526 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446549 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446564 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446581 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.446626 4661 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31336b4a-1953-44ab-b229-401a3a3ac031-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.447053 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.447413 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.447767 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.448020 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.448248 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.448773 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9a455c2-0405-4416-a367-c34353ee3fa3-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.450339 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9a455c2-0405-4416-a367-c34353ee3fa3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.450853 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.453921 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9a455c2-0405-4416-a367-c34353ee3fa3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.454466 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.462030 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gwrt\" (UniqueName: \"kubernetes.io/projected/c9a455c2-0405-4416-a367-c34353ee3fa3-kube-api-access-8gwrt\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.493202 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c9a455c2-0405-4416-a367-c34353ee3fa3\") " pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.513403 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 05:51:13 crc kubenswrapper[4661]: I1001 05:51:13.774294 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b2acad5-a746-42a5-b9e8-a9904ad242bc" path="/var/lib/kubelet/pods/7b2acad5-a746-42a5-b9e8-a9904ad242bc/volumes" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.049237 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.068938 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9a455c2-0405-4416-a367-c34353ee3fa3","Type":"ContainerStarted","Data":"19fda975d3de5baebf164f4106f53c6b7323b03762d2d59c00ae77ca40cc934b"} Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.077799 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.219970 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.239147 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.255909 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.262179 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.269053 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.269653 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.269789 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.269914 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.270026 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.270218 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-tsxss" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.270488 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.272182 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.369711 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.369772 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.369851 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/066b014c-15b7-49e3-9f01-b758855ca8a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.369998 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c62b4\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-kube-api-access-c62b4\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370065 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370106 4661 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370356 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/066b014c-15b7-49e3-9f01-b758855ca8a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370542 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370602 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370626 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.370831 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472092 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472157 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472179 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472224 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472251 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472274 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472301 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/066b014c-15b7-49e3-9f01-b758855ca8a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472334 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c62b4\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-kube-api-access-c62b4\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472375 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.472433 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/066b014c-15b7-49e3-9f01-b758855ca8a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.473124 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.473718 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.473845 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.474376 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.474456 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.474453 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/066b014c-15b7-49e3-9f01-b758855ca8a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.477954 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.480221 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/066b014c-15b7-49e3-9f01-b758855ca8a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.480521 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/066b014c-15b7-49e3-9f01-b758855ca8a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.481182 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.495240 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c62b4\" (UniqueName: \"kubernetes.io/projected/066b014c-15b7-49e3-9f01-b758855ca8a2-kube-api-access-c62b4\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 
05:51:14.538608 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"066b014c-15b7-49e3-9f01-b758855ca8a2\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:14 crc kubenswrapper[4661]: I1001 05:51:14.588050 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:15 crc kubenswrapper[4661]: I1001 05:51:15.112400 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 05:51:15 crc kubenswrapper[4661]: W1001 05:51:15.121142 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod066b014c_15b7_49e3_9f01_b758855ca8a2.slice/crio-b454aa586cf168d9aef57c6a7d86b05cf5054887e4a83a80b1353bb1f27fa595 WatchSource:0}: Error finding container b454aa586cf168d9aef57c6a7d86b05cf5054887e4a83a80b1353bb1f27fa595: Status 404 returned error can't find the container with id b454aa586cf168d9aef57c6a7d86b05cf5054887e4a83a80b1353bb1f27fa595 Oct 01 05:51:15 crc kubenswrapper[4661]: I1001 05:51:15.774270 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31336b4a-1953-44ab-b229-401a3a3ac031" path="/var/lib/kubelet/pods/31336b4a-1953-44ab-b229-401a3a3ac031/volumes" Oct 01 05:51:16 crc kubenswrapper[4661]: I1001 05:51:16.102893 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9a455c2-0405-4416-a367-c34353ee3fa3","Type":"ContainerStarted","Data":"8c035a36f023d177b3b614f92dbfd9a91f4b99193542d3321437bd36420e7c66"} Oct 01 05:51:16 crc kubenswrapper[4661]: I1001 05:51:16.106404 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"066b014c-15b7-49e3-9f01-b758855ca8a2","Type":"ContainerStarted","Data":"b454aa586cf168d9aef57c6a7d86b05cf5054887e4a83a80b1353bb1f27fa595"} Oct 01 05:51:17 crc kubenswrapper[4661]: I1001 05:51:17.119742 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"066b014c-15b7-49e3-9f01-b758855ca8a2","Type":"ContainerStarted","Data":"6f2a3f49f7a4a9916b8c738eb04ca0a0bd0da907d155613aaffabf1b83d1cef8"} Oct 01 05:51:21 crc kubenswrapper[4661]: I1001 05:51:21.963175 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:21 crc kubenswrapper[4661]: I1001 05:51:21.967022 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:21 crc kubenswrapper[4661]: I1001 05:51:21.968973 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 01 05:51:21 crc kubenswrapper[4661]: I1001 05:51:21.989581 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049171 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049219 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049293 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049317 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049336 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049383 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.049412 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4xd5\" (UniqueName: \"kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.150961 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: 
\"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151139 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151221 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151331 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151442 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4xd5\" (UniqueName: \"kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151542 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.151612 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152161 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152234 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152250 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " 
pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152259 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152525 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.152714 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.177287 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4xd5\" (UniqueName: \"kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5\") pod \"dnsmasq-dns-bf6c7df67-7nxz7\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.315345 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:22 crc kubenswrapper[4661]: I1001 05:51:22.844060 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:23 crc kubenswrapper[4661]: I1001 05:51:23.188958 4661 generic.go:334] "Generic (PLEG): container finished" podID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerID="1d22f182a5cdacbb096242e38b134b0654cf3241e65e567d6a97282a861a88c5" exitCode=0 Oct 01 05:51:23 crc kubenswrapper[4661]: I1001 05:51:23.188990 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" event={"ID":"d3279dc0-359c-4c2b-99cc-0cc6befb04ee","Type":"ContainerDied","Data":"1d22f182a5cdacbb096242e38b134b0654cf3241e65e567d6a97282a861a88c5"} Oct 01 05:51:23 crc kubenswrapper[4661]: I1001 05:51:23.189252 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" event={"ID":"d3279dc0-359c-4c2b-99cc-0cc6befb04ee","Type":"ContainerStarted","Data":"dbb8bdd6fb2b4a2d029972544b6bbf73af5d79e04cb757598320d6fb2d41aa18"} Oct 01 05:51:24 crc kubenswrapper[4661]: I1001 05:51:24.210514 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" event={"ID":"d3279dc0-359c-4c2b-99cc-0cc6befb04ee","Type":"ContainerStarted","Data":"ee7d63898d6dd2b08a3b912f3ba485c26bdb6928c8d34e33485f373641fc4eb4"} Oct 01 05:51:24 crc kubenswrapper[4661]: I1001 05:51:24.211214 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:24 crc kubenswrapper[4661]: I1001 05:51:24.235239 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" podStartSLOduration=3.235219848 podStartE2EDuration="3.235219848s" 
podCreationTimestamp="2025-10-01 05:51:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:51:24.233806109 +0000 UTC m=+1333.171784723" watchObservedRunningTime="2025-10-01 05:51:24.235219848 +0000 UTC m=+1333.173198462" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.317889 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.412618 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.412885 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="dnsmasq-dns" containerID="cri-o://2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f" gracePeriod=10 Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.615700 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77b58f4b85-pvpg8"] Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.617492 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.630211 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77b58f4b85-pvpg8"] Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.686491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.686602 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkhqr\" (UniqueName: \"kubernetes.io/projected/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-kube-api-access-zkhqr\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.686671 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.686695 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.686929 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-config\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " 
pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.687066 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.687124 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788079 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788125 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788203 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788241 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkhqr\" (UniqueName: \"kubernetes.io/projected/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-kube-api-access-zkhqr\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788281 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788297 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.788359 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-config\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " 
pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.789167 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-config\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.789692 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.790196 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.791971 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.793323 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.793714 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.812469 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkhqr\" (UniqueName: \"kubernetes.io/projected/3f69f735-866d-4ab5-9ef4-f940c2cc2ee5-kube-api-access-zkhqr\") pod \"dnsmasq-dns-77b58f4b85-pvpg8\" (UID: \"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5\") " pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.931494 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.950263 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.991793 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9scp\" (UniqueName: \"kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995329 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995354 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995486 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995513 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995538 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb\") pod \"d15989a0-c753-4158-a901-c018777e7560\" (UID: \"d15989a0-c753-4158-a901-c018777e7560\") " Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.995655 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp" (OuterVolumeSpecName: "kube-api-access-k9scp") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "kube-api-access-k9scp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:32 crc kubenswrapper[4661]: I1001 05:51:32.997427 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9scp\" (UniqueName: \"kubernetes.io/projected/d15989a0-c753-4158-a901-c018777e7560-kube-api-access-k9scp\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.069030 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config" (OuterVolumeSpecName: "config") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.071248 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.071989 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.080334 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.099894 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.100172 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.100231 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.100283 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.102855 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d15989a0-c753-4158-a901-c018777e7560" (UID: "d15989a0-c753-4158-a901-c018777e7560"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.202518 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d15989a0-c753-4158-a901-c018777e7560-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.345537 4661 generic.go:334] "Generic (PLEG): container finished" podID="d15989a0-c753-4158-a901-c018777e7560" containerID="2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f" exitCode=0 Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.345584 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" event={"ID":"d15989a0-c753-4158-a901-c018777e7560","Type":"ContainerDied","Data":"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f"} Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.345656 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" event={"ID":"d15989a0-c753-4158-a901-c018777e7560","Type":"ContainerDied","Data":"4dfaa5c0e41e2b75f086ad8fea794ddaed124e8ae60544e1f0c86c9cc3efdbb6"} Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.345670 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-s42qj" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.345711 4661 scope.go:117] "RemoveContainer" containerID="2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.391105 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.399711 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-s42qj"] Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.403079 4661 scope.go:117] "RemoveContainer" containerID="8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.436532 4661 scope.go:117] "RemoveContainer" containerID="2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f" Oct 01 05:51:33 crc kubenswrapper[4661]: E1001 05:51:33.439929 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f\": container with ID starting with 2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f not found: ID does not exist" containerID="2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.440000 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f"} err="failed to get container status \"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f\": rpc error: code = NotFound desc = could not find container \"2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f\": container with ID starting with 2474e19919406a94c6f89f0a9558dc966372c704f620adaca4e7817165f0480f not found: ID does not exist" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.440041 4661 scope.go:117] "RemoveContainer" containerID="8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28" Oct 01 05:51:33 crc kubenswrapper[4661]: E1001 05:51:33.446789 
4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28\": container with ID starting with 8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28 not found: ID does not exist" containerID="8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.446886 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28"} err="failed to get container status \"8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28\": rpc error: code = NotFound desc = could not find container \"8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28\": container with ID starting with 8959cbdd5ea3e2a2fd9b24f4e46966e02f067bfcb65e4265b0b799edb5a0fd28 not found: ID does not exist" Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.453298 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77b58f4b85-pvpg8"] Oct 01 05:51:33 crc kubenswrapper[4661]: I1001 05:51:33.775883 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d15989a0-c753-4158-a901-c018777e7560" path="/var/lib/kubelet/pods/d15989a0-c753-4158-a901-c018777e7560/volumes" Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.309067 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.309542 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.309613 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.310752 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.310866 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf" gracePeriod=600 Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.366228 4661 generic.go:334] "Generic (PLEG): container finished" podID="3f69f735-866d-4ab5-9ef4-f940c2cc2ee5" containerID="4e68047593bd3e731ef0be7466799dc8d0997251a94934823651d60e330049dd" exitCode=0 Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.366292 4661 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" event={"ID":"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5","Type":"ContainerDied","Data":"4e68047593bd3e731ef0be7466799dc8d0997251a94934823651d60e330049dd"} Oct 01 05:51:34 crc kubenswrapper[4661]: I1001 05:51:34.366327 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" event={"ID":"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5","Type":"ContainerStarted","Data":"a2c844f04bdfd2212bcadfb2e5b2c8d60908848c73f93bb0ede93cd39ab5c97d"} Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.384687 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" event={"ID":"3f69f735-866d-4ab5-9ef4-f940c2cc2ee5","Type":"ContainerStarted","Data":"36dbd41a08647987eed3746687f9278c84dac7450657afdf8665e614392406cd"} Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.385516 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.389068 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf" exitCode=0 Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.389121 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf"} Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.389165 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"} Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.389214 4661 scope.go:117] "RemoveContainer" containerID="7c6267fc47b44c8673e99a573506180a4e4a545631b58c429ea8f0fc9b008d0f" Oct 01 05:51:35 crc kubenswrapper[4661]: I1001 05:51:35.433475 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" podStartSLOduration=3.433454444 podStartE2EDuration="3.433454444s" podCreationTimestamp="2025-10-01 05:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:51:35.417052497 +0000 UTC m=+1344.355031151" watchObservedRunningTime="2025-10-01 05:51:35.433454444 +0000 UTC m=+1344.371433068" Oct 01 05:51:42 crc kubenswrapper[4661]: I1001 05:51:42.952810 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77b58f4b85-pvpg8" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.049815 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.050182 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="dnsmasq-dns" containerID="cri-o://ee7d63898d6dd2b08a3b912f3ba485c26bdb6928c8d34e33485f373641fc4eb4" gracePeriod=10 Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.497528 4661 generic.go:334] "Generic (PLEG): container finished" 
podID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerID="ee7d63898d6dd2b08a3b912f3ba485c26bdb6928c8d34e33485f373641fc4eb4" exitCode=0 Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.497735 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" event={"ID":"d3279dc0-359c-4c2b-99cc-0cc6befb04ee","Type":"ContainerDied","Data":"ee7d63898d6dd2b08a3b912f3ba485c26bdb6928c8d34e33485f373641fc4eb4"} Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.617787 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.793276 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.793908 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.793956 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.793994 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.794055 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.794096 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd5\" (UniqueName: \"kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.794119 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0\") pod \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\" (UID: \"d3279dc0-359c-4c2b-99cc-0cc6befb04ee\") " Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.808054 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5" (OuterVolumeSpecName: "kube-api-access-w4xd5") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "kube-api-access-w4xd5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.856247 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.864163 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.869052 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.881482 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.883286 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896084 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd5\" (UniqueName: \"kubernetes.io/projected/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-kube-api-access-w4xd5\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896117 4661 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896129 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896137 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896146 4661 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.896156 4661 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:43 crc kubenswrapper[4661]: I1001 05:51:43.909793 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config" (OuterVolumeSpecName: "config") pod "d3279dc0-359c-4c2b-99cc-0cc6befb04ee" (UID: "d3279dc0-359c-4c2b-99cc-0cc6befb04ee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.002542 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3279dc0-359c-4c2b-99cc-0cc6befb04ee-config\") on node \"crc\" DevicePath \"\"" Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.508150 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" event={"ID":"d3279dc0-359c-4c2b-99cc-0cc6befb04ee","Type":"ContainerDied","Data":"dbb8bdd6fb2b4a2d029972544b6bbf73af5d79e04cb757598320d6fb2d41aa18"} Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.508476 4661 scope.go:117] "RemoveContainer" containerID="ee7d63898d6dd2b08a3b912f3ba485c26bdb6928c8d34e33485f373641fc4eb4" Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.508232 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-7nxz7" Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.538394 4661 scope.go:117] "RemoveContainer" containerID="1d22f182a5cdacbb096242e38b134b0654cf3241e65e567d6a97282a861a88c5" Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.557744 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:44 crc kubenswrapper[4661]: I1001 05:51:44.569560 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-7nxz7"] Oct 01 05:51:45 crc kubenswrapper[4661]: I1001 05:51:45.778202 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" path="/var/lib/kubelet/pods/d3279dc0-359c-4c2b-99cc-0cc6befb04ee/volumes" Oct 01 05:51:49 crc kubenswrapper[4661]: I1001 05:51:49.575305 4661 generic.go:334] "Generic (PLEG): container finished" podID="c9a455c2-0405-4416-a367-c34353ee3fa3" containerID="8c035a36f023d177b3b614f92dbfd9a91f4b99193542d3321437bd36420e7c66" exitCode=0 Oct 01 05:51:49 crc kubenswrapper[4661]: I1001 05:51:49.575442 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9a455c2-0405-4416-a367-c34353ee3fa3","Type":"ContainerDied","Data":"8c035a36f023d177b3b614f92dbfd9a91f4b99193542d3321437bd36420e7c66"} Oct 01 05:51:50 crc kubenswrapper[4661]: I1001 05:51:50.587566 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9a455c2-0405-4416-a367-c34353ee3fa3","Type":"ContainerStarted","Data":"9e0f4c69a3de00de62cb4143374142e0814f6e2ad2abb891e2c98161bde5922e"} Oct 01 05:51:50 crc kubenswrapper[4661]: I1001 05:51:50.588181 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 01 05:51:50 crc kubenswrapper[4661]: I1001 05:51:50.590457 4661 generic.go:334] "Generic (PLEG): container finished" podID="066b014c-15b7-49e3-9f01-b758855ca8a2" containerID="6f2a3f49f7a4a9916b8c738eb04ca0a0bd0da907d155613aaffabf1b83d1cef8" exitCode=0 Oct 01 05:51:50 crc kubenswrapper[4661]: I1001 05:51:50.590514 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"066b014c-15b7-49e3-9f01-b758855ca8a2","Type":"ContainerDied","Data":"6f2a3f49f7a4a9916b8c738eb04ca0a0bd0da907d155613aaffabf1b83d1cef8"} Oct 01 05:51:50 crc kubenswrapper[4661]: I1001 05:51:50.636317 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.636278645 podStartE2EDuration="37.636278645s" podCreationTimestamp="2025-10-01 05:51:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:51:50.617118544 +0000 UTC m=+1359.555097178" watchObservedRunningTime="2025-10-01 05:51:50.636278645 +0000 UTC m=+1359.574257269" Oct 01 05:51:51 crc kubenswrapper[4661]: I1001 05:51:51.604415 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"066b014c-15b7-49e3-9f01-b758855ca8a2","Type":"ContainerStarted","Data":"28f65bce140b1915c049d2fb45ff992d55178da189780c7a556fde701cb911fa"} Oct 01 05:51:51 crc kubenswrapper[4661]: I1001 05:51:51.605135 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:51:51 crc kubenswrapper[4661]: I1001 05:51:51.650146 4661 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.650117867 podStartE2EDuration="37.650117867s" podCreationTimestamp="2025-10-01 05:51:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 05:51:51.637927755 +0000 UTC m=+1360.575906369" watchObservedRunningTime="2025-10-01 05:51:51.650117867 +0000 UTC m=+1360.588096521" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.540974 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k"] Oct 01 05:52:01 crc kubenswrapper[4661]: E1001 05:52:01.542183 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542203 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: E1001 05:52:01.542230 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="init" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542241 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="init" Oct 01 05:52:01 crc kubenswrapper[4661]: E1001 05:52:01.542264 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542276 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: E1001 05:52:01.542306 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="init" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542317 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="init" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542671 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d15989a0-c753-4158-a901-c018777e7560" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.542724 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3279dc0-359c-4c2b-99cc-0cc6befb04ee" containerName="dnsmasq-dns" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.543879 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.546344 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.546742 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.547287 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.547916 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.572345 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k"] Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.688374 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.688493 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvnll\" (UniqueName: \"kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.688568 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.688643 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.790422 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.790590 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.790681 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.790811 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvnll\" (UniqueName: \"kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.798352 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.798439 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.812595 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.814151 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvnll\" (UniqueName: \"kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:01 crc kubenswrapper[4661]: I1001 05:52:01.876952 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:02 crc kubenswrapper[4661]: I1001 05:52:02.457998 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k"] Oct 01 05:52:02 crc kubenswrapper[4661]: W1001 05:52:02.461234 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0c02866_25ee_4ef1_9bba_572422cabc26.slice/crio-ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820 WatchSource:0}: Error finding container ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820: Status 404 returned error can't find the container with id ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820 Oct 01 05:52:02 crc kubenswrapper[4661]: I1001 05:52:02.728395 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" event={"ID":"a0c02866-25ee-4ef1-9bba-572422cabc26","Type":"ContainerStarted","Data":"ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820"} Oct 01 05:52:03 crc kubenswrapper[4661]: I1001 05:52:03.517792 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 05:52:04 crc kubenswrapper[4661]: I1001 05:52:04.592823 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 05:52:12 crc kubenswrapper[4661]: I1001 05:52:12.009400 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 05:52:12 crc kubenswrapper[4661]: I1001 05:52:12.839449 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" event={"ID":"a0c02866-25ee-4ef1-9bba-572422cabc26","Type":"ContainerStarted","Data":"43f4b3671a8cf222cf78751ab89a47f3f3509554d9cdd44e0c0c36ebd568b9f3"} Oct 01 05:52:12 crc kubenswrapper[4661]: I1001 05:52:12.876012 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" podStartSLOduration=2.333668092 podStartE2EDuration="11.875988054s" podCreationTimestamp="2025-10-01 05:52:01 +0000 UTC" firstStartedPulling="2025-10-01 05:52:02.463797899 +0000 UTC m=+1371.401776523" lastFinishedPulling="2025-10-01 05:52:12.006117861 +0000 UTC m=+1380.944096485" observedRunningTime="2025-10-01 05:52:12.861839939 +0000 UTC m=+1381.799818593" watchObservedRunningTime="2025-10-01 05:52:12.875988054 +0000 UTC m=+1381.813966708" Oct 01 05:52:17 crc kubenswrapper[4661]: I1001 05:52:17.947551 4661 scope.go:117] "RemoveContainer" containerID="e4b706e5b85bcc690eca0586716d67b71f05d9134448ccee9f89b6082624960a" Oct 01 05:52:24 crc kubenswrapper[4661]: I1001 05:52:24.003422 4661 generic.go:334] "Generic (PLEG): container finished" podID="a0c02866-25ee-4ef1-9bba-572422cabc26" containerID="43f4b3671a8cf222cf78751ab89a47f3f3509554d9cdd44e0c0c36ebd568b9f3" exitCode=0 Oct 01 05:52:24 crc kubenswrapper[4661]: I1001 05:52:24.003519 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" event={"ID":"a0c02866-25ee-4ef1-9bba-572422cabc26","Type":"ContainerDied","Data":"43f4b3671a8cf222cf78751ab89a47f3f3509554d9cdd44e0c0c36ebd568b9f3"} Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.849230 4661 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.982345 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle\") pod \"a0c02866-25ee-4ef1-9bba-572422cabc26\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.982509 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvnll\" (UniqueName: \"kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll\") pod \"a0c02866-25ee-4ef1-9bba-572422cabc26\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.982652 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory\") pod \"a0c02866-25ee-4ef1-9bba-572422cabc26\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.982754 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key\") pod \"a0c02866-25ee-4ef1-9bba-572422cabc26\" (UID: \"a0c02866-25ee-4ef1-9bba-572422cabc26\") " Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.989102 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll" (OuterVolumeSpecName: "kube-api-access-hvnll") pod "a0c02866-25ee-4ef1-9bba-572422cabc26" (UID: "a0c02866-25ee-4ef1-9bba-572422cabc26"). InnerVolumeSpecName "kube-api-access-hvnll". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:52:25 crc kubenswrapper[4661]: I1001 05:52:25.990150 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a0c02866-25ee-4ef1-9bba-572422cabc26" (UID: "a0c02866-25ee-4ef1-9bba-572422cabc26"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.035545 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory" (OuterVolumeSpecName: "inventory") pod "a0c02866-25ee-4ef1-9bba-572422cabc26" (UID: "a0c02866-25ee-4ef1-9bba-572422cabc26"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.036099 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" event={"ID":"a0c02866-25ee-4ef1-9bba-572422cabc26","Type":"ContainerDied","Data":"ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820"} Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.036151 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab76b6d3befb9099a8bb9f8483f48ca3d4b4b49499c6d08522958acfa0faf820" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.036166 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.046023 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a0c02866-25ee-4ef1-9bba-572422cabc26" (UID: "a0c02866-25ee-4ef1-9bba-572422cabc26"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.088259 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.088325 4661 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.088349 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvnll\" (UniqueName: \"kubernetes.io/projected/a0c02866-25ee-4ef1-9bba-572422cabc26-kube-api-access-hvnll\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.088367 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0c02866-25ee-4ef1-9bba-572422cabc26-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.114688 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66"] Oct 01 05:52:26 crc kubenswrapper[4661]: E1001 05:52:26.115293 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c02866-25ee-4ef1-9bba-572422cabc26" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.115355 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c02866-25ee-4ef1-9bba-572422cabc26" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.115657 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c02866-25ee-4ef1-9bba-572422cabc26" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.116445 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.132101 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66"] Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.189964 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.190045 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m9xc\" (UniqueName: \"kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.190081 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.292747 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.292818 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m9xc\" (UniqueName: \"kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.292854 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.297736 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.300417 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.314293 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m9xc\" (UniqueName: \"kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-sjh66\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:26 crc kubenswrapper[4661]: I1001 05:52:26.469889 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:27 crc kubenswrapper[4661]: I1001 05:52:27.075909 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66"] Oct 01 05:52:27 crc kubenswrapper[4661]: W1001 05:52:27.085687 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc423888_0bfc_45b9_ba9f_6bc52e8df43b.slice/crio-f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d WatchSource:0}: Error finding container f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d: Status 404 returned error can't find the container with id f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d Oct 01 05:52:28 crc kubenswrapper[4661]: I1001 05:52:28.064661 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" event={"ID":"dc423888-0bfc-45b9-ba9f-6bc52e8df43b","Type":"ContainerStarted","Data":"ff565971f3f6850336519d5d9d5a0de5378a88b12f591eb59d5b66e35999b1e3"} Oct 01 05:52:28 crc kubenswrapper[4661]: I1001 05:52:28.065088 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" event={"ID":"dc423888-0bfc-45b9-ba9f-6bc52e8df43b","Type":"ContainerStarted","Data":"f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d"} Oct 01 05:52:28 crc kubenswrapper[4661]: I1001 05:52:28.090678 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" podStartSLOduration=1.691204133 podStartE2EDuration="2.090658945s" podCreationTimestamp="2025-10-01 05:52:26 +0000 UTC" firstStartedPulling="2025-10-01 05:52:27.090027797 +0000 UTC m=+1396.028006411" lastFinishedPulling="2025-10-01 05:52:27.489482569 +0000 UTC m=+1396.427461223" observedRunningTime="2025-10-01 05:52:28.090011678 +0000 UTC m=+1397.027990322" watchObservedRunningTime="2025-10-01 05:52:28.090658945 +0000 UTC m=+1397.028637559" Oct 01 05:52:31 crc kubenswrapper[4661]: I1001 05:52:31.104420 4661 generic.go:334] "Generic (PLEG): container finished" podID="dc423888-0bfc-45b9-ba9f-6bc52e8df43b" containerID="ff565971f3f6850336519d5d9d5a0de5378a88b12f591eb59d5b66e35999b1e3" exitCode=0 Oct 01 05:52:31 crc kubenswrapper[4661]: I1001 05:52:31.105858 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" event={"ID":"dc423888-0bfc-45b9-ba9f-6bc52e8df43b","Type":"ContainerDied","Data":"ff565971f3f6850336519d5d9d5a0de5378a88b12f591eb59d5b66e35999b1e3"} Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.624091 4661 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.761599 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m9xc\" (UniqueName: \"kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc\") pod \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.761732 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key\") pod \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.761776 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory\") pod \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\" (UID: \"dc423888-0bfc-45b9-ba9f-6bc52e8df43b\") " Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.770024 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc" (OuterVolumeSpecName: "kube-api-access-8m9xc") pod "dc423888-0bfc-45b9-ba9f-6bc52e8df43b" (UID: "dc423888-0bfc-45b9-ba9f-6bc52e8df43b"). InnerVolumeSpecName "kube-api-access-8m9xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.812416 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dc423888-0bfc-45b9-ba9f-6bc52e8df43b" (UID: "dc423888-0bfc-45b9-ba9f-6bc52e8df43b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.816747 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory" (OuterVolumeSpecName: "inventory") pod "dc423888-0bfc-45b9-ba9f-6bc52e8df43b" (UID: "dc423888-0bfc-45b9-ba9f-6bc52e8df43b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.864536 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m9xc\" (UniqueName: \"kubernetes.io/projected/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-kube-api-access-8m9xc\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.864573 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:32 crc kubenswrapper[4661]: I1001 05:52:32.864586 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dc423888-0bfc-45b9-ba9f-6bc52e8df43b-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.131801 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" event={"ID":"dc423888-0bfc-45b9-ba9f-6bc52e8df43b","Type":"ContainerDied","Data":"f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d"} Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.131859 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3523d3402ac8242807a6b2f9f67e2001b2450ca16f4957264f106c43ec0dd8d" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.132408 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-sjh66" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.262339 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6"] Oct 01 05:52:33 crc kubenswrapper[4661]: E1001 05:52:33.263758 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc423888-0bfc-45b9-ba9f-6bc52e8df43b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.263841 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc423888-0bfc-45b9-ba9f-6bc52e8df43b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.264441 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc423888-0bfc-45b9-ba9f-6bc52e8df43b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.266603 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.270109 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.270444 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.270740 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.271438 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.275055 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6"] Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.377652 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.378033 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.378189 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.378301 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttzfl\" (UniqueName: \"kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.481196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.481276 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: 
\"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.481315 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttzfl\" (UniqueName: \"kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.481399 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.487287 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.487691 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.489246 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.512857 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttzfl\" (UniqueName: \"kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:33 crc kubenswrapper[4661]: I1001 05:52:33.598438 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" Oct 01 05:52:34 crc kubenswrapper[4661]: I1001 05:52:34.018061 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6"] Oct 01 05:52:34 crc kubenswrapper[4661]: I1001 05:52:34.162298 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" event={"ID":"bbd00e4d-aa89-4800-867e-d8f78c3d2c70","Type":"ContainerStarted","Data":"c7310715e4bbeef7bb7f9e11692f549c8509410c45d3077a49a2dd67d1c327d3"} Oct 01 05:52:35 crc kubenswrapper[4661]: I1001 05:52:35.173285 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" event={"ID":"bbd00e4d-aa89-4800-867e-d8f78c3d2c70","Type":"ContainerStarted","Data":"0a8772f209c77b1bc6402b3abb6ba77467dfa7504028871dc8a0a517b4379ba6"} Oct 01 05:52:35 crc kubenswrapper[4661]: I1001 05:52:35.191552 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" podStartSLOduration=1.6929976039999999 podStartE2EDuration="2.19153s" podCreationTimestamp="2025-10-01 05:52:33 +0000 UTC" firstStartedPulling="2025-10-01 05:52:34.026997234 +0000 UTC m=+1402.964975848" lastFinishedPulling="2025-10-01 05:52:34.52552959 +0000 UTC m=+1403.463508244" observedRunningTime="2025-10-01 05:52:35.190189593 +0000 UTC m=+1404.128168217" watchObservedRunningTime="2025-10-01 05:52:35.19153 +0000 UTC m=+1404.129508624" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.459126 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lqlkm"] Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.464361 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.483217 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lqlkm"] Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.532089 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwqv6\" (UniqueName: \"kubernetes.io/projected/918474ca-d1fb-44a1-a07f-96c072ded353-kube-api-access-vwqv6\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.532260 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-utilities\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.532305 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-catalog-content\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.634116 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-utilities\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.634188 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-catalog-content\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.634289 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwqv6\" (UniqueName: \"kubernetes.io/projected/918474ca-d1fb-44a1-a07f-96c072ded353-kube-api-access-vwqv6\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.634769 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-utilities\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.635152 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/918474ca-d1fb-44a1-a07f-96c072ded353-catalog-content\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.658545 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vwqv6\" (UniqueName: \"kubernetes.io/projected/918474ca-d1fb-44a1-a07f-96c072ded353-kube-api-access-vwqv6\") pod \"community-operators-lqlkm\" (UID: \"918474ca-d1fb-44a1-a07f-96c072ded353\") " pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:03 crc kubenswrapper[4661]: I1001 05:53:03.805151 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:04 crc kubenswrapper[4661]: I1001 05:53:04.294771 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lqlkm"] Oct 01 05:53:04 crc kubenswrapper[4661]: I1001 05:53:04.577197 4661 generic.go:334] "Generic (PLEG): container finished" podID="918474ca-d1fb-44a1-a07f-96c072ded353" containerID="816211a944b03f927623e6c5d2b671b2bb2db9f09cc057ee70fbec3746041bfa" exitCode=0 Oct 01 05:53:04 crc kubenswrapper[4661]: I1001 05:53:04.577241 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lqlkm" event={"ID":"918474ca-d1fb-44a1-a07f-96c072ded353","Type":"ContainerDied","Data":"816211a944b03f927623e6c5d2b671b2bb2db9f09cc057ee70fbec3746041bfa"} Oct 01 05:53:04 crc kubenswrapper[4661]: I1001 05:53:04.577271 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lqlkm" event={"ID":"918474ca-d1fb-44a1-a07f-96c072ded353","Type":"ContainerStarted","Data":"381fdfd61b211e9f8162682a4b828c07662163c5571138cebe665507402aca45"} Oct 01 05:53:09 crc kubenswrapper[4661]: I1001 05:53:09.640233 4661 generic.go:334] "Generic (PLEG): container finished" podID="918474ca-d1fb-44a1-a07f-96c072ded353" containerID="9646a590f08f77504dde1b788355fd495dec0d6aae5f8b4dad5a7ba71e43e5af" exitCode=0 Oct 01 05:53:09 crc kubenswrapper[4661]: I1001 05:53:09.640301 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lqlkm" event={"ID":"918474ca-d1fb-44a1-a07f-96c072ded353","Type":"ContainerDied","Data":"9646a590f08f77504dde1b788355fd495dec0d6aae5f8b4dad5a7ba71e43e5af"} Oct 01 05:53:10 crc kubenswrapper[4661]: I1001 05:53:10.674082 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lqlkm" event={"ID":"918474ca-d1fb-44a1-a07f-96c072ded353","Type":"ContainerStarted","Data":"5ea16040000926bba3cba6f78d5431149a1e3ca147dd91774365af0f7701a014"} Oct 01 05:53:10 crc kubenswrapper[4661]: I1001 05:53:10.710427 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lqlkm" podStartSLOduration=2.094647878 podStartE2EDuration="7.710406969s" podCreationTimestamp="2025-10-01 05:53:03 +0000 UTC" firstStartedPulling="2025-10-01 05:53:04.580345702 +0000 UTC m=+1433.518324316" lastFinishedPulling="2025-10-01 05:53:10.196104763 +0000 UTC m=+1439.134083407" observedRunningTime="2025-10-01 05:53:10.700482548 +0000 UTC m=+1439.638461192" watchObservedRunningTime="2025-10-01 05:53:10.710406969 +0000 UTC m=+1439.648385603" Oct 01 05:53:13 crc kubenswrapper[4661]: I1001 05:53:13.805507 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:13 crc kubenswrapper[4661]: I1001 05:53:13.805920 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:13 crc kubenswrapper[4661]: I1001 05:53:13.899463 4661 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:18 crc kubenswrapper[4661]: I1001 05:53:18.089313 4661 scope.go:117] "RemoveContainer" containerID="33c88f372e0330b3ed7784e5237e5ef58e8d698e6604b7c3dee3210e757d9427" Oct 01 05:53:18 crc kubenswrapper[4661]: I1001 05:53:18.138689 4661 scope.go:117] "RemoveContainer" containerID="2ada95fe4bda404b5dd58857619ad25cdc3c3801d25f51ee4e55f0ba0fc2deea" Oct 01 05:53:23 crc kubenswrapper[4661]: I1001 05:53:23.897426 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lqlkm" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.013587 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lqlkm"] Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.063609 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.064070 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jkssm" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="registry-server" containerID="cri-o://a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e" gracePeriod=2 Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.578356 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.676105 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7244z\" (UniqueName: \"kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z\") pod \"5d5a94d5-db6c-439b-912e-ea501e444d57\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.676191 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities\") pod \"5d5a94d5-db6c-439b-912e-ea501e444d57\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.676247 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content\") pod \"5d5a94d5-db6c-439b-912e-ea501e444d57\" (UID: \"5d5a94d5-db6c-439b-912e-ea501e444d57\") " Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.676916 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities" (OuterVolumeSpecName: "utilities") pod "5d5a94d5-db6c-439b-912e-ea501e444d57" (UID: "5d5a94d5-db6c-439b-912e-ea501e444d57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.682810 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z" (OuterVolumeSpecName: "kube-api-access-7244z") pod "5d5a94d5-db6c-439b-912e-ea501e444d57" (UID: "5d5a94d5-db6c-439b-912e-ea501e444d57"). InnerVolumeSpecName "kube-api-access-7244z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.739614 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d5a94d5-db6c-439b-912e-ea501e444d57" (UID: "5d5a94d5-db6c-439b-912e-ea501e444d57"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.778929 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7244z\" (UniqueName: \"kubernetes.io/projected/5d5a94d5-db6c-439b-912e-ea501e444d57-kube-api-access-7244z\") on node \"crc\" DevicePath \"\"" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.778961 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.778970 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5a94d5-db6c-439b-912e-ea501e444d57-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.870192 4661 generic.go:334] "Generic (PLEG): container finished" podID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerID="a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e" exitCode=0 Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.870470 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jkssm" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.870394 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerDied","Data":"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e"} Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.870746 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkssm" event={"ID":"5d5a94d5-db6c-439b-912e-ea501e444d57","Type":"ContainerDied","Data":"aaca2fd53067a56e23127c1686deab7595163aa2b7c504cfc5d6a697731aa1d5"} Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.870786 4661 scope.go:117] "RemoveContainer" containerID="a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.899542 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.900164 4661 scope.go:117] "RemoveContainer" containerID="9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.906599 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jkssm"] Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.922670 4661 scope.go:117] "RemoveContainer" containerID="94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.962138 4661 scope.go:117] "RemoveContainer" containerID="a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e" Oct 01 05:53:24 crc kubenswrapper[4661]: E1001 05:53:24.962605 4661 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e\": container with ID starting with a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e not found: ID does not exist" containerID="a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.962662 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e"} err="failed to get container status \"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e\": rpc error: code = NotFound desc = could not find container \"a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e\": container with ID starting with a84d6839cb472865f6ebf6ad47fcd688ddf50ef92582a05f78db34d00a64980e not found: ID does not exist" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.962685 4661 scope.go:117] "RemoveContainer" containerID="9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95" Oct 01 05:53:24 crc kubenswrapper[4661]: E1001 05:53:24.962955 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95\": container with ID starting with 9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95 not found: ID does not exist" containerID="9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.962978 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95"} err="failed to get container status \"9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95\": rpc error: code = NotFound desc = could not find container \"9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95\": container with ID starting with 9f1964303ba12236decdbf438833e61a048806e4a8265cd6b254b54253680f95 not found: ID does not exist" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.962991 4661 scope.go:117] "RemoveContainer" containerID="94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891" Oct 01 05:53:24 crc kubenswrapper[4661]: E1001 05:53:24.963362 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891\": container with ID starting with 94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891 not found: ID does not exist" containerID="94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891" Oct 01 05:53:24 crc kubenswrapper[4661]: I1001 05:53:24.963405 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891"} err="failed to get container status \"94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891\": rpc error: code = NotFound desc = could not find container \"94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891\": container with ID starting with 94b6b95bb33d0fbd559ca31b85767f315fa56d0ef6fc93a356295c457e23f891 not found: ID does not exist" Oct 01 05:53:25 crc kubenswrapper[4661]: I1001 05:53:25.785180 4661 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" path="/var/lib/kubelet/pods/5d5a94d5-db6c-439b-912e-ea501e444d57/volumes" Oct 01 05:53:34 crc kubenswrapper[4661]: I1001 05:53:34.309622 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:53:34 crc kubenswrapper[4661]: I1001 05:53:34.310402 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:54:04 crc kubenswrapper[4661]: I1001 05:54:04.309054 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:54:04 crc kubenswrapper[4661]: I1001 05:54:04.309758 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.752856 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:14 crc kubenswrapper[4661]: E1001 05:54:14.765835 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="registry-server" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.765859 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="registry-server" Oct 01 05:54:14 crc kubenswrapper[4661]: E1001 05:54:14.765878 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="extract-utilities" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.765886 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="extract-utilities" Oct 01 05:54:14 crc kubenswrapper[4661]: E1001 05:54:14.765904 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="extract-content" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.765913 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="extract-content" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.766152 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d5a94d5-db6c-439b-912e-ea501e444d57" containerName="registry-server" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.767936 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.769050 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.861682 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.861859 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.861877 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bls6\" (UniqueName: \"kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.963545 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.963812 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.963847 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bls6\" (UniqueName: \"kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.964163 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.964292 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content\") pod \"redhat-operators-lwl5b\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:14 crc kubenswrapper[4661]: I1001 05:54:14.988362 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bls6\" (UniqueName: \"kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6\") pod \"redhat-operators-lwl5b\" (UID: 
\"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:15 crc kubenswrapper[4661]: I1001 05:54:15.108089 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:15 crc kubenswrapper[4661]: I1001 05:54:15.580768 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:16 crc kubenswrapper[4661]: I1001 05:54:16.548329 4661 generic.go:334] "Generic (PLEG): container finished" podID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerID="625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb" exitCode=0 Oct 01 05:54:16 crc kubenswrapper[4661]: I1001 05:54:16.548393 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerDied","Data":"625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb"} Oct 01 05:54:16 crc kubenswrapper[4661]: I1001 05:54:16.548430 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerStarted","Data":"0128d3487a2930d0fb6959565acf05a8b7ed67410569e1cf07653036f6db6a30"} Oct 01 05:54:16 crc kubenswrapper[4661]: I1001 05:54:16.551994 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.262285 4661 scope.go:117] "RemoveContainer" containerID="4b1f79d5d6c33963aa97163df184d53bfbe1c00d491dc4b9cc4aa4e115f27c83" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.285333 4661 scope.go:117] "RemoveContainer" containerID="259982fb4b657562c20206c08d94cf8f2e864b923f2d6af89da07aa5071b9acc" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.316589 4661 scope.go:117] "RemoveContainer" containerID="807a61224a2c0f3f2b67c701b6c1327b09ad43c8eca957f3fda17dc18a7d7a29" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.522835 4661 scope.go:117] "RemoveContainer" containerID="61786f3482eadec9b644f895ece00cb4a5b234149d52d724f7c9420b06973761" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.555405 4661 scope.go:117] "RemoveContainer" containerID="ade549bbffffc096c018670a7b49048e6ced4aabb5cea949d8cea17695ab06e1" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.572350 4661 generic.go:334] "Generic (PLEG): container finished" podID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerID="9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c" exitCode=0 Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.572413 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerDied","Data":"9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c"} Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.600908 4661 scope.go:117] "RemoveContainer" containerID="0e0699d367b1f9b5183f6c53f3a7ad35da88f927509ba1de72bcfa012efa1a1b" Oct 01 05:54:18 crc kubenswrapper[4661]: I1001 05:54:18.639168 4661 scope.go:117] "RemoveContainer" containerID="75c7554a1930a34e735c9baff4c57858069e98b225029ba50465d4575c905cc2" Oct 01 05:54:19 crc kubenswrapper[4661]: I1001 05:54:19.599940 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" 
event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerStarted","Data":"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4"} Oct 01 05:54:19 crc kubenswrapper[4661]: I1001 05:54:19.633835 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lwl5b" podStartSLOduration=3.160732964 podStartE2EDuration="5.633814498s" podCreationTimestamp="2025-10-01 05:54:14 +0000 UTC" firstStartedPulling="2025-10-01 05:54:16.551660927 +0000 UTC m=+1505.489639551" lastFinishedPulling="2025-10-01 05:54:19.024742431 +0000 UTC m=+1507.962721085" observedRunningTime="2025-10-01 05:54:19.631496855 +0000 UTC m=+1508.569475509" watchObservedRunningTime="2025-10-01 05:54:19.633814498 +0000 UTC m=+1508.571793132" Oct 01 05:54:25 crc kubenswrapper[4661]: I1001 05:54:25.108994 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:25 crc kubenswrapper[4661]: I1001 05:54:25.109863 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:25 crc kubenswrapper[4661]: I1001 05:54:25.202722 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:25 crc kubenswrapper[4661]: I1001 05:54:25.724291 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:25 crc kubenswrapper[4661]: I1001 05:54:25.788992 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:27 crc kubenswrapper[4661]: I1001 05:54:27.684763 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lwl5b" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="registry-server" containerID="cri-o://bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4" gracePeriod=2 Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.179663 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.274305 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities\") pod \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.274419 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bls6\" (UniqueName: \"kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6\") pod \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.274498 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content\") pod \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\" (UID: \"115a5a71-b424-4d1e-bd84-4ba4f3fd8321\") " Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.275224 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities" (OuterVolumeSpecName: "utilities") pod "115a5a71-b424-4d1e-bd84-4ba4f3fd8321" (UID: "115a5a71-b424-4d1e-bd84-4ba4f3fd8321"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.282809 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6" (OuterVolumeSpecName: "kube-api-access-6bls6") pod "115a5a71-b424-4d1e-bd84-4ba4f3fd8321" (UID: "115a5a71-b424-4d1e-bd84-4ba4f3fd8321"). InnerVolumeSpecName "kube-api-access-6bls6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.376797 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.376832 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bls6\" (UniqueName: \"kubernetes.io/projected/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-kube-api-access-6bls6\") on node \"crc\" DevicePath \"\"" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.396888 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "115a5a71-b424-4d1e-bd84-4ba4f3fd8321" (UID: "115a5a71-b424-4d1e-bd84-4ba4f3fd8321"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.479589 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/115a5a71-b424-4d1e-bd84-4ba4f3fd8321-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.699852 4661 generic.go:334] "Generic (PLEG): container finished" podID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerID="bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4" exitCode=0 Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.699913 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerDied","Data":"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4"} Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.699953 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lwl5b" event={"ID":"115a5a71-b424-4d1e-bd84-4ba4f3fd8321","Type":"ContainerDied","Data":"0128d3487a2930d0fb6959565acf05a8b7ed67410569e1cf07653036f6db6a30"} Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.699985 4661 scope.go:117] "RemoveContainer" containerID="bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.699981 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lwl5b" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.720052 4661 scope.go:117] "RemoveContainer" containerID="9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.768663 4661 scope.go:117] "RemoveContainer" containerID="625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.773979 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.784959 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lwl5b"] Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.839908 4661 scope.go:117] "RemoveContainer" containerID="bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4" Oct 01 05:54:28 crc kubenswrapper[4661]: E1001 05:54:28.842496 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4\": container with ID starting with bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4 not found: ID does not exist" containerID="bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.842552 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4"} err="failed to get container status \"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4\": rpc error: code = NotFound desc = could not find container \"bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4\": container with ID starting with bc059051fa9c18d5dcf40b5d32e43d27b9d493a6bb789615a14480a118598aa4 not found: ID does not exist" Oct 01 05:54:28 crc 
kubenswrapper[4661]: I1001 05:54:28.842589 4661 scope.go:117] "RemoveContainer" containerID="9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c" Oct 01 05:54:28 crc kubenswrapper[4661]: E1001 05:54:28.844417 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c\": container with ID starting with 9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c not found: ID does not exist" containerID="9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.844462 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c"} err="failed to get container status \"9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c\": rpc error: code = NotFound desc = could not find container \"9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c\": container with ID starting with 9c4cbf626cbf5ad2c2b7dc37470786e7ede949ac95e1b6ab153eaa92919bff2c not found: ID does not exist" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.844490 4661 scope.go:117] "RemoveContainer" containerID="625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb" Oct 01 05:54:28 crc kubenswrapper[4661]: E1001 05:54:28.844928 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb\": container with ID starting with 625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb not found: ID does not exist" containerID="625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb" Oct 01 05:54:28 crc kubenswrapper[4661]: I1001 05:54:28.844979 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb"} err="failed to get container status \"625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb\": rpc error: code = NotFound desc = could not find container \"625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb\": container with ID starting with 625610b92685c0bd7f4b39c2d6c921c27675097fe7b49d2718631f4dbef90feb not found: ID does not exist" Oct 01 05:54:28 crc kubenswrapper[4661]: E1001 05:54:28.943498 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod115a5a71_b424_4d1e_bd84_4ba4f3fd8321.slice/crio-0128d3487a2930d0fb6959565acf05a8b7ed67410569e1cf07653036f6db6a30\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod115a5a71_b424_4d1e_bd84_4ba4f3fd8321.slice\": RecentStats: unable to find data in memory cache]" Oct 01 05:54:29 crc kubenswrapper[4661]: I1001 05:54:29.798184 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" path="/var/lib/kubelet/pods/115a5a71-b424-4d1e-bd84-4ba4f3fd8321/volumes" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.309461 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.310220 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.310274 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.310863 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.310918 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" gracePeriod=600 Oct 01 05:54:34 crc kubenswrapper[4661]: E1001 05:54:34.456482 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.791122 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" exitCode=0 Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.791237 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"} Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.791341 4661 scope.go:117] "RemoveContainer" containerID="84a53173a3a65408f44fe54c0406b8dc34eed2d6cf16c9dfba93d9e6d05a09bf" Oct 01 05:54:34 crc kubenswrapper[4661]: I1001 05:54:34.793379 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:54:34 crc kubenswrapper[4661]: E1001 05:54:34.794852 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:54:47 crc kubenswrapper[4661]: I1001 05:54:47.757982 4661 scope.go:117] 
"RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:54:47 crc kubenswrapper[4661]: E1001 05:54:47.759626 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:55:01 crc kubenswrapper[4661]: I1001 05:55:01.767704 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:55:01 crc kubenswrapper[4661]: E1001 05:55:01.768604 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:55:14 crc kubenswrapper[4661]: I1001 05:55:14.757558 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:55:14 crc kubenswrapper[4661]: E1001 05:55:14.758923 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:55:18 crc kubenswrapper[4661]: I1001 05:55:18.820072 4661 scope.go:117] "RemoveContainer" containerID="8d55b6e6f80431fbc2cbf64f46c047797ee3eca6b9527684fcf09c807c26f708" Oct 01 05:55:18 crc kubenswrapper[4661]: I1001 05:55:18.857205 4661 scope.go:117] "RemoveContainer" containerID="dfc3c198ca949bf63825c762f4853db619eca77644921c6a42f19fabad290419" Oct 01 05:55:18 crc kubenswrapper[4661]: I1001 05:55:18.891434 4661 scope.go:117] "RemoveContainer" containerID="3bc71c128935810138bfd2a17cd2b83202647bf4cd29a94298f45ee032f425b0" Oct 01 05:55:18 crc kubenswrapper[4661]: I1001 05:55:18.932663 4661 scope.go:117] "RemoveContainer" containerID="0bfb35e257792c466c1e295e92f7a10a2b68a9df84fc4411291b08723c40f014" Oct 01 05:55:27 crc kubenswrapper[4661]: I1001 05:55:27.757969 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:55:27 crc kubenswrapper[4661]: E1001 05:55:27.759211 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:55:39 crc kubenswrapper[4661]: I1001 05:55:39.757546 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:55:39 crc kubenswrapper[4661]: E1001 
05:55:39.758745 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.807003 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"] Oct 01 05:55:44 crc kubenswrapper[4661]: E1001 05:55:44.808150 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="registry-server" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.808168 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="registry-server" Oct 01 05:55:44 crc kubenswrapper[4661]: E1001 05:55:44.808207 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="extract-content" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.808218 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="extract-content" Oct 01 05:55:44 crc kubenswrapper[4661]: E1001 05:55:44.808243 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="extract-utilities" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.808251 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="extract-utilities" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.808502 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="115a5a71-b424-4d1e-bd84-4ba4f3fd8321" containerName="registry-server" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.810857 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.832259 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"] Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.838288 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.838799 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdh29\" (UniqueName: \"kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.838963 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.940231 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.940350 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdh29\" (UniqueName: \"kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.940378 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.940728 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.940809 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:44 crc kubenswrapper[4661]: I1001 05:55:44.974338 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vdh29\" (UniqueName: \"kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29\") pod \"certified-operators-2vlrs\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") " pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:45 crc kubenswrapper[4661]: I1001 05:55:45.141076 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2vlrs" Oct 01 05:55:45 crc kubenswrapper[4661]: I1001 05:55:45.667595 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"] Oct 01 05:55:45 crc kubenswrapper[4661]: W1001 05:55:45.671599 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod411367fd_0c10_4b1b_964f_68fa66317635.slice/crio-cb425250b608e1375b6cd72276cb3c6eda9a4fb454ac97e74c8c074b49b911a2 WatchSource:0}: Error finding container cb425250b608e1375b6cd72276cb3c6eda9a4fb454ac97e74c8c074b49b911a2: Status 404 returned error can't find the container with id cb425250b608e1375b6cd72276cb3c6eda9a4fb454ac97e74c8c074b49b911a2 Oct 01 05:55:45 crc kubenswrapper[4661]: I1001 05:55:45.797313 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerStarted","Data":"cb425250b608e1375b6cd72276cb3c6eda9a4fb454ac97e74c8c074b49b911a2"} Oct 01 05:55:46 crc kubenswrapper[4661]: I1001 05:55:46.812557 4661 generic.go:334] "Generic (PLEG): container finished" podID="411367fd-0c10-4b1b-964f-68fa66317635" containerID="373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4" exitCode=0 Oct 01 05:55:46 crc kubenswrapper[4661]: I1001 05:55:46.812807 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerDied","Data":"373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4"} Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.591598 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"] Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.595433 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.612258 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"] Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.723423 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb9mp\" (UniqueName: \"kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.723747 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.723819 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.825420 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.825468 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.825548 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb9mp\" (UniqueName: \"kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.826189 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.826225 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.835526 4661 generic.go:334] "Generic (PLEG): container 
finished" podID="411367fd-0c10-4b1b-964f-68fa66317635" containerID="0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488" exitCode=0 Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.835564 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerDied","Data":"0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488"} Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.877552 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb9mp\" (UniqueName: \"kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp\") pod \"redhat-marketplace-f2qwf\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") " pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:48 crc kubenswrapper[4661]: I1001 05:55:48.925328 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2qwf" Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.563047 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"] Oct 01 05:55:49 crc kubenswrapper[4661]: W1001 05:55:49.570422 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1328df6_0e3e_4d3a_a93e_bef8d8608f7f.slice/crio-cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2 WatchSource:0}: Error finding container cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2: Status 404 returned error can't find the container with id cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2 Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.848253 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerStarted","Data":"1fea4348847ed8a0b64783f0d97fc70ecfb203a566eb11e3037be5d04e915b0a"} Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.850720 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerStarted","Data":"cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2"} Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.853170 4661 generic.go:334] "Generic (PLEG): container finished" podID="bbd00e4d-aa89-4800-867e-d8f78c3d2c70" containerID="0a8772f209c77b1bc6402b3abb6ba77467dfa7504028871dc8a0a517b4379ba6" exitCode=0 Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.853218 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" event={"ID":"bbd00e4d-aa89-4800-867e-d8f78c3d2c70","Type":"ContainerDied","Data":"0a8772f209c77b1bc6402b3abb6ba77467dfa7504028871dc8a0a517b4379ba6"} Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.855245 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerStarted","Data":"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"} Oct 01 05:55:49 crc kubenswrapper[4661]: I1001 05:55:49.897304 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2vlrs" 
podStartSLOduration=3.4510553010000002 podStartE2EDuration="5.897287063s" podCreationTimestamp="2025-10-01 05:55:44 +0000 UTC" firstStartedPulling="2025-10-01 05:55:46.815264605 +0000 UTC m=+1595.753243249" lastFinishedPulling="2025-10-01 05:55:49.261496407 +0000 UTC m=+1598.199475011" observedRunningTime="2025-10-01 05:55:49.892234026 +0000 UTC m=+1598.830212640" watchObservedRunningTime="2025-10-01 05:55:49.897287063 +0000 UTC m=+1598.835265667"
Oct 01 05:55:50 crc kubenswrapper[4661]: I1001 05:55:50.867899 4661 generic.go:334] "Generic (PLEG): container finished" podID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerID="1fea4348847ed8a0b64783f0d97fc70ecfb203a566eb11e3037be5d04e915b0a" exitCode=0
Oct 01 05:55:50 crc kubenswrapper[4661]: I1001 05:55:50.868231 4661 generic.go:334] "Generic (PLEG): container finished" podID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerID="b957c2823f07393ea32986fed554212c5466d6562b1ca7246a977c4f126d363a" exitCode=0
Oct 01 05:55:50 crc kubenswrapper[4661]: I1001 05:55:50.868097 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerDied","Data":"1fea4348847ed8a0b64783f0d97fc70ecfb203a566eb11e3037be5d04e915b0a"}
Oct 01 05:55:50 crc kubenswrapper[4661]: I1001 05:55:50.868503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerDied","Data":"b957c2823f07393ea32986fed554212c5466d6562b1ca7246a977c4f126d363a"}
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.312393 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6"
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.385278 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory\") pod \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") "
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.385417 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttzfl\" (UniqueName: \"kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl\") pod \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") "
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.385687 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key\") pod \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") "
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.385789 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle\") pod \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\" (UID: \"bbd00e4d-aa89-4800-867e-d8f78c3d2c70\") "
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.394368 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "bbd00e4d-aa89-4800-867e-d8f78c3d2c70" (UID: "bbd00e4d-aa89-4800-867e-d8f78c3d2c70"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.417334 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl" (OuterVolumeSpecName: "kube-api-access-ttzfl") pod "bbd00e4d-aa89-4800-867e-d8f78c3d2c70" (UID: "bbd00e4d-aa89-4800-867e-d8f78c3d2c70"). InnerVolumeSpecName "kube-api-access-ttzfl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.428865 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bbd00e4d-aa89-4800-867e-d8f78c3d2c70" (UID: "bbd00e4d-aa89-4800-867e-d8f78c3d2c70"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.436502 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory" (OuterVolumeSpecName: "inventory") pod "bbd00e4d-aa89-4800-867e-d8f78c3d2c70" (UID: "bbd00e4d-aa89-4800-867e-d8f78c3d2c70"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.488388 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.488668 4661 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.488683 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-inventory\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.488698 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttzfl\" (UniqueName: \"kubernetes.io/projected/bbd00e4d-aa89-4800-867e-d8f78c3d2c70-kube-api-access-ttzfl\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.772841 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:55:51 crc kubenswrapper[4661]: E1001 05:55:51.773365 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.886469 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerStarted","Data":"ab2ac3596b8d43b0579f21feadc16014c61bbdbf75c8fdd4d93591707f85e70b"}
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.893089 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6" event={"ID":"bbd00e4d-aa89-4800-867e-d8f78c3d2c70","Type":"ContainerDied","Data":"c7310715e4bbeef7bb7f9e11692f549c8509410c45d3077a49a2dd67d1c327d3"}
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.893160 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7310715e4bbeef7bb7f9e11692f549c8509410c45d3077a49a2dd67d1c327d3"
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.893209 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6"
Oct 01 05:55:51 crc kubenswrapper[4661]: I1001 05:55:51.952742 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f2qwf" podStartSLOduration=2.292444549 podStartE2EDuration="3.952717749s" podCreationTimestamp="2025-10-01 05:55:48 +0000 UTC" firstStartedPulling="2025-10-01 05:55:49.851308732 +0000 UTC m=+1598.789287346" lastFinishedPulling="2025-10-01 05:55:51.511581922 +0000 UTC m=+1600.449560546" observedRunningTime="2025-10-01 05:55:51.911357853 +0000 UTC m=+1600.849336507" watchObservedRunningTime="2025-10-01 05:55:51.952717749 +0000 UTC m=+1600.890696373"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.016733 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"]
Oct 01 05:55:52 crc kubenswrapper[4661]: E1001 05:55:52.017133 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd00e4d-aa89-4800-867e-d8f78c3d2c70" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.017151 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd00e4d-aa89-4800-867e-d8f78c3d2c70" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.017341 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbd00e4d-aa89-4800-867e-d8f78c3d2c70" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.017943 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.020857 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.021178 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.021365 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.021550 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.047082 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"]
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.125254 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.125333 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9nf8\" (UniqueName: \"kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.125409 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.227309 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.227607 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.227725 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9nf8\" (UniqueName: \"kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.234246 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.242404 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.249319 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9nf8\" (UniqueName: \"kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.336110 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:55:52 crc kubenswrapper[4661]: W1001 05:55:52.890557 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda68882a5_f8f7_40a2_8406_409452df5dc5.slice/crio-3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7 WatchSource:0}: Error finding container 3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7: Status 404 returned error can't find the container with id 3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.892521 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"]
Oct 01 05:55:52 crc kubenswrapper[4661]: I1001 05:55:52.905906 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk" event={"ID":"a68882a5-f8f7-40a2-8406-409452df5dc5","Type":"ContainerStarted","Data":"3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7"}
Oct 01 05:55:53 crc kubenswrapper[4661]: I1001 05:55:53.916953 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk" event={"ID":"a68882a5-f8f7-40a2-8406-409452df5dc5","Type":"ContainerStarted","Data":"23dbc6be5eb827d4bdc98831c9540292a8fbb067313ddeb909dcf3a7e59fbcfa"}
Oct 01 05:55:53 crc kubenswrapper[4661]: I1001 05:55:53.935730 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk" podStartSLOduration=2.387011878 podStartE2EDuration="2.935705143s" podCreationTimestamp="2025-10-01 05:55:51 +0000 UTC" firstStartedPulling="2025-10-01 05:55:52.893584438 +0000 UTC m=+1601.831563052" lastFinishedPulling="2025-10-01 05:55:53.442277663 +0000 UTC m=+1602.380256317" observedRunningTime="2025-10-01 05:55:53.930914512 +0000 UTC m=+1602.868893156" watchObservedRunningTime="2025-10-01 05:55:53.935705143 +0000 UTC m=+1602.873683797"
Oct 01 05:55:55 crc kubenswrapper[4661]: I1001 05:55:55.141329 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:55:55 crc kubenswrapper[4661]: I1001 05:55:55.141375 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:55:55 crc kubenswrapper[4661]: I1001 05:55:55.207950 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:55:56 crc kubenswrapper[4661]: I1001 05:55:56.014134 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:55:58 crc kubenswrapper[4661]: I1001 05:55:58.925686 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:55:58 crc kubenswrapper[4661]: I1001 05:55:58.926787 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.005773 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"]
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.006179 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2vlrs" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="registry-server" containerID="cri-o://af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8" gracePeriod=2
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.025126 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.093228 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.515129 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.598931 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content\") pod \"411367fd-0c10-4b1b-964f-68fa66317635\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") "
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.599093 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdh29\" (UniqueName: \"kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29\") pod \"411367fd-0c10-4b1b-964f-68fa66317635\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") "
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.599141 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities\") pod \"411367fd-0c10-4b1b-964f-68fa66317635\" (UID: \"411367fd-0c10-4b1b-964f-68fa66317635\") "
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.600150 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities" (OuterVolumeSpecName: "utilities") pod "411367fd-0c10-4b1b-964f-68fa66317635" (UID: "411367fd-0c10-4b1b-964f-68fa66317635"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.604510 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29" (OuterVolumeSpecName: "kube-api-access-vdh29") pod "411367fd-0c10-4b1b-964f-68fa66317635" (UID: "411367fd-0c10-4b1b-964f-68fa66317635"). InnerVolumeSpecName "kube-api-access-vdh29". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.636842 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "411367fd-0c10-4b1b-964f-68fa66317635" (UID: "411367fd-0c10-4b1b-964f-68fa66317635"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.701127 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.701179 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdh29\" (UniqueName: \"kubernetes.io/projected/411367fd-0c10-4b1b-964f-68fa66317635-kube-api-access-vdh29\") on node \"crc\" DevicePath \"\""
Oct 01 05:55:59 crc kubenswrapper[4661]: I1001 05:55:59.701193 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411367fd-0c10-4b1b-964f-68fa66317635-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.007661 4661 generic.go:334] "Generic (PLEG): container finished" podID="411367fd-0c10-4b1b-964f-68fa66317635" containerID="af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8" exitCode=0
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.007705 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerDied","Data":"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"}
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.008387 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2vlrs" event={"ID":"411367fd-0c10-4b1b-964f-68fa66317635","Type":"ContainerDied","Data":"cb425250b608e1375b6cd72276cb3c6eda9a4fb454ac97e74c8c074b49b911a2"}
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.008414 4661 scope.go:117] "RemoveContainer" containerID="af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.007775 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2vlrs"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.057692 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"]
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.062163 4661 scope.go:117] "RemoveContainer" containerID="0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.068582 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2vlrs"]
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.102997 4661 scope.go:117] "RemoveContainer" containerID="373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.142186 4661 scope.go:117] "RemoveContainer" containerID="af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"
Oct 01 05:56:00 crc kubenswrapper[4661]: E1001 05:56:00.142804 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8\": container with ID starting with af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8 not found: ID does not exist" containerID="af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.142859 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8"} err="failed to get container status \"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8\": rpc error: code = NotFound desc = could not find container \"af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8\": container with ID starting with af3e20850b7ae940c569e1466a52c5688bbdb16ad09d8f30b0269e6359f394c8 not found: ID does not exist"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.142893 4661 scope.go:117] "RemoveContainer" containerID="0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488"
Oct 01 05:56:00 crc kubenswrapper[4661]: E1001 05:56:00.143213 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488\": container with ID starting with 0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488 not found: ID does not exist" containerID="0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.143256 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488"} err="failed to get container status \"0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488\": rpc error: code = NotFound desc = could not find container \"0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488\": container with ID starting with 0aaad02b15ea069f3ddbfe88fcb99b20949ab585432c31d4a2979cadb695e488 not found: ID does not exist"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.143289 4661 scope.go:117] "RemoveContainer" containerID="373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4"
Oct 01 05:56:00 crc kubenswrapper[4661]: E1001 05:56:00.143693 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4\": container with ID starting with 373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4 not found: ID does not exist" containerID="373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4"
Oct 01 05:56:00 crc kubenswrapper[4661]: I1001 05:56:00.143722 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4"} err="failed to get container status \"373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4\": rpc error: code = NotFound desc = could not find container \"373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4\": container with ID starting with 373b6a51d01ee246ef47aff5e317481b37898efa2d02248d59fd5be79bac02f4 not found: ID does not exist"
Oct 01 05:56:01 crc kubenswrapper[4661]: I1001 05:56:01.771561 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="411367fd-0c10-4b1b-964f-68fa66317635" path="/var/lib/kubelet/pods/411367fd-0c10-4b1b-964f-68fa66317635/volumes"
Oct 01 05:56:02 crc kubenswrapper[4661]: I1001 05:56:02.589145 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"]
Oct 01 05:56:02 crc kubenswrapper[4661]: I1001 05:56:02.589490 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f2qwf" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="registry-server" containerID="cri-o://ab2ac3596b8d43b0579f21feadc16014c61bbdbf75c8fdd4d93591707f85e70b" gracePeriod=2
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.039244 4661 generic.go:334] "Generic (PLEG): container finished" podID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerID="ab2ac3596b8d43b0579f21feadc16014c61bbdbf75c8fdd4d93591707f85e70b" exitCode=0
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.039331 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerDied","Data":"ab2ac3596b8d43b0579f21feadc16014c61bbdbf75c8fdd4d93591707f85e70b"}
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.039584 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2qwf" event={"ID":"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f","Type":"ContainerDied","Data":"cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2"}
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.039597 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd475d653c8cefb1d3c2b86be4c904ca82055c2a7650be07088200b4aa17bfe2"
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.149330 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.214007 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities\") pod \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") "
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.214265 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb9mp\" (UniqueName: \"kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp\") pod \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") "
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.215601 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities" (OuterVolumeSpecName: "utilities") pod "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" (UID: "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.215763 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content\") pod \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\" (UID: \"f1328df6-0e3e-4d3a-a93e-bef8d8608f7f\") "
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.216897 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.223042 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp" (OuterVolumeSpecName: "kube-api-access-kb9mp") pod "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" (UID: "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f"). InnerVolumeSpecName "kube-api-access-kb9mp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.234310 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" (UID: "f1328df6-0e3e-4d3a-a93e-bef8d8608f7f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.318719 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb9mp\" (UniqueName: \"kubernetes.io/projected/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-kube-api-access-kb9mp\") on node \"crc\" DevicePath \"\""
Oct 01 05:56:03 crc kubenswrapper[4661]: I1001 05:56:03.319109 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 05:56:04 crc kubenswrapper[4661]: I1001 05:56:04.056162 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2qwf"
Oct 01 05:56:04 crc kubenswrapper[4661]: I1001 05:56:04.103742 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"]
Oct 01 05:56:04 crc kubenswrapper[4661]: I1001 05:56:04.113705 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2qwf"]
Oct 01 05:56:04 crc kubenswrapper[4661]: I1001 05:56:04.757264 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:56:04 crc kubenswrapper[4661]: E1001 05:56:04.757923 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:56:05 crc kubenswrapper[4661]: I1001 05:56:05.768175 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" path="/var/lib/kubelet/pods/f1328df6-0e3e-4d3a-a93e-bef8d8608f7f/volumes"
Oct 01 05:56:18 crc kubenswrapper[4661]: I1001 05:56:18.063044 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-mttld"]
Oct 01 05:56:18 crc kubenswrapper[4661]: I1001 05:56:18.077969 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-mttld"]
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.076605 4661 scope.go:117] "RemoveContainer" containerID="fde785b55e762d033fa631fb6e4669eb7db71f7fbd0a9040850349258c6feadc"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.108379 4661 scope.go:117] "RemoveContainer" containerID="be0b2e70ed01ce29b794479aff4bdebc48a7399c396df1948b7415a936598030"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.145818 4661 scope.go:117] "RemoveContainer" containerID="319037cd7406a790025665fc0577d14227b3809f7fdca66bc36b95d28ac24dd3"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.180434 4661 scope.go:117] "RemoveContainer" containerID="9947c1cd36302c585eff1272a6e9eefeae64444a87cd392f9cc453b44c3b3034"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.217747 4661 scope.go:117] "RemoveContainer" containerID="d775f51e44aa55626189c97e6f609f9ed50b2d49938eae6f84affe455c0d4e34"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.757540 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:56:19 crc kubenswrapper[4661]: E1001 05:56:19.758416 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:56:19 crc kubenswrapper[4661]: I1001 05:56:19.778523 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56cf476f-5124-495f-b4b8-4899a31e4f63" path="/var/lib/kubelet/pods/56cf476f-5124-495f-b4b8-4899a31e4f63/volumes"
Oct 01 05:56:20 crc kubenswrapper[4661]: I1001 05:56:20.044012 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-j2bph"]
Oct 01 05:56:20 crc kubenswrapper[4661]: I1001 05:56:20.057978 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-v4b88"]
Oct 01 05:56:20 crc kubenswrapper[4661]: I1001 05:56:20.067961 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-v4b88"]
Oct 01 05:56:20 crc kubenswrapper[4661]: I1001 05:56:20.078129 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-j2bph"]
Oct 01 05:56:21 crc kubenswrapper[4661]: I1001 05:56:21.781341 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a6cf6dd-8146-4582-a2be-3525b97f43fa" path="/var/lib/kubelet/pods/1a6cf6dd-8146-4582-a2be-3525b97f43fa/volumes"
Oct 01 05:56:21 crc kubenswrapper[4661]: I1001 05:56:21.783843 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d068347-68e4-4719-8e0a-6a514729f385" path="/var/lib/kubelet/pods/4d068347-68e4-4719-8e0a-6a514729f385/volumes"
Oct 01 05:56:22 crc kubenswrapper[4661]: I1001 05:56:22.033546 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-ppl9g"]
Oct 01 05:56:22 crc kubenswrapper[4661]: I1001 05:56:22.044565 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-ppl9g"]
Oct 01 05:56:23 crc kubenswrapper[4661]: I1001 05:56:23.776434 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c08e846-393c-4f97-904d-f0a5c89a33e5" path="/var/lib/kubelet/pods/3c08e846-393c-4f97-904d-f0a5c89a33e5/volumes"
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.046367 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-8f3c-account-create-55d7t"]
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.063138 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8557-account-create-xgf4w"]
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.078931 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-8f3c-account-create-55d7t"]
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.091407 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8557-account-create-xgf4w"]
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.778704 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c05131a9-6d9f-486d-8f8d-6667e3e65506" path="/var/lib/kubelet/pods/c05131a9-6d9f-486d-8f8d-6667e3e65506/volumes"
Oct 01 05:56:31 crc kubenswrapper[4661]: I1001 05:56:31.781374 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ee810c-d045-46b4-bcb8-1c2490123d06" path="/var/lib/kubelet/pods/d4ee810c-d045-46b4-bcb8-1c2490123d06/volumes"
Oct 01 05:56:32 crc kubenswrapper[4661]: I1001 05:56:32.756919 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:56:32 crc kubenswrapper[4661]: E1001 05:56:32.757208 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.059854 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-c0e5-account-create-nx68n"]
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.079305 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-ca83-account-create-wxd5d"]
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.088083 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-c0e5-account-create-nx68n"]
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.097991 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-ca83-account-create-wxd5d"]
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.777132 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35ff2fe-28c0-4815-a674-7b063a959b28" path="/var/lib/kubelet/pods/f35ff2fe-28c0-4815-a674-7b063a959b28/volumes"
Oct 01 05:56:39 crc kubenswrapper[4661]: I1001 05:56:39.778426 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc52c0b9-60dc-4c50-9022-d993371171ec" path="/var/lib/kubelet/pods/fc52c0b9-60dc-4c50-9022-d993371171ec/volumes"
Oct 01 05:56:45 crc kubenswrapper[4661]: I1001 05:56:45.757932 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:56:45 crc kubenswrapper[4661]: E1001 05:56:45.758958 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:56:57 crc kubenswrapper[4661]: I1001 05:56:57.758667 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:56:57 crc kubenswrapper[4661]: E1001 05:56:57.760105 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.046020 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-jqk5r"]
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.057106 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-x8c5b"]
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.067507 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-4vbbg"]
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.076557 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-x8c5b"]
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.086146 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-4vbbg"]
Oct 01 05:57:06 crc kubenswrapper[4661]: I1001 05:57:06.093444 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-jqk5r"]
Oct 01 05:57:07 crc kubenswrapper[4661]: I1001 05:57:07.775983 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43bc7be9-b86f-4811-ae9f-f8c1898910e3" path="/var/lib/kubelet/pods/43bc7be9-b86f-4811-ae9f-f8c1898910e3/volumes"
Oct 01 05:57:07 crc kubenswrapper[4661]: I1001 05:57:07.777179 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="450dd430-3c14-413f-ba17-a467a882deb5" path="/var/lib/kubelet/pods/450dd430-3c14-413f-ba17-a467a882deb5/volumes"
Oct 01 05:57:07 crc kubenswrapper[4661]: I1001 05:57:07.777938 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da61731a-e57a-4977-b781-2742e48948bd" path="/var/lib/kubelet/pods/da61731a-e57a-4977-b781-2742e48948bd/volumes"
Oct 01 05:57:09 crc kubenswrapper[4661]: I1001 05:57:09.757620 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:57:09 crc kubenswrapper[4661]: E1001 05:57:09.758297 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:57:14 crc kubenswrapper[4661]: I1001 05:57:14.040515 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-h5lwt"]
Oct 01 05:57:14 crc kubenswrapper[4661]: I1001 05:57:14.055542 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-h5lwt"]
Oct 01 05:57:15 crc kubenswrapper[4661]: I1001 05:57:15.796622 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6" path="/var/lib/kubelet/pods/d9ff8ea6-9373-4cb5-941b-ac46d4fac6d6/volumes"
Oct 01 05:57:17 crc kubenswrapper[4661]: I1001 05:57:17.042688 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8a27-account-create-wcdfl"]
Oct 01 05:57:17 crc kubenswrapper[4661]: I1001 05:57:17.059430 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8a27-account-create-wcdfl"]
Oct 01 05:57:17 crc kubenswrapper[4661]: I1001 05:57:17.772064 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c241ad1-75a9-423e-942c-7e7965589bdc" path="/var/lib/kubelet/pods/6c241ad1-75a9-423e-942c-7e7965589bdc/volumes"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.427116 4661 scope.go:117] "RemoveContainer" containerID="e972e3b1fb007ffaa726df923cee4704571fd53fe9696a0072257c80ed170e09"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.462685 4661 scope.go:117] "RemoveContainer" containerID="258f546d48f21daee8f76b5a8f2a61544865a69cddc4f8fe22e431ef7c3e992b"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.550313 4661 scope.go:117] "RemoveContainer" containerID="5cb8ed685ba5e03c2e0b55c5fb97a9e8a1c638d574f449195cdb031d532cfd6a"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.624679 4661 scope.go:117] "RemoveContainer" containerID="2bac3f72ee1aac17e57ba63180c86a042583a1903d0259a8229234b58774ecc8"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.681434 4661 scope.go:117] "RemoveContainer" containerID="cad9a8e6d787f1ba2c6817d88205144472a94551541679b222c95f6eb77b97ff"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.716157 4661 scope.go:117] "RemoveContainer" containerID="fa5e8dccb39b0d77e514de26ef0f261a01fbc69e6ec0569683c6ea8af4a66637"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.791910 4661 scope.go:117] "RemoveContainer" containerID="689a4089c94bdf3f4f4224deb013107deb48b0e7125eb729cb6c30e5a31c1f61"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.840577 4661 scope.go:117] "RemoveContainer" containerID="4fd219f68f45baa29b0a78f6a3431221cdf2936d9690369a60d9b8c611b03285"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.890205 4661 scope.go:117] "RemoveContainer" containerID="1359f29d342cded5cf0e1cfde794a247fa8741949f3c7bc4fb8e7fb0ab27004f"
Oct 01 05:57:19 crc kubenswrapper[4661]: I1001 05:57:19.977883 4661 scope.go:117] "RemoveContainer" containerID="0c31670bca15667c235057f0bfbaf3c18ce9e873557a8599037a44672796eed9"
Oct 01 05:57:20 crc kubenswrapper[4661]: I1001 05:57:20.019625 4661 scope.go:117] "RemoveContainer" containerID="a6c6b7639a2547d364919b1640328a8e77276a0f4eefcc02b71f74dc06e80de9"
Oct 01 05:57:20 crc kubenswrapper[4661]: I1001 05:57:20.029148 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-st9fk"]
Oct 01 05:57:20 crc kubenswrapper[4661]: I1001 05:57:20.040142 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-st9fk"]
Oct 01 05:57:20 crc kubenswrapper[4661]: I1001 05:57:20.051164 4661 scope.go:117] "RemoveContainer" containerID="dad4e7ec501be993ab367ab7f124033437f83dc1184c88ebdbc714b1dd4d44ff"
Oct 01 05:57:20 crc kubenswrapper[4661]: I1001 05:57:20.086967 4661 scope.go:117] "RemoveContainer" containerID="a0f329ac31d84c6ebe1dba5e3f2b9377e7c946b59ca2b4dc7db376114e6aa43c"
Oct 01 05:57:21 crc kubenswrapper[4661]: I1001 05:57:21.034096 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qgcfj"]
Oct 01 05:57:21 crc kubenswrapper[4661]: I1001 05:57:21.043376 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qgcfj"]
Oct 01 05:57:21 crc kubenswrapper[4661]: I1001 05:57:21.774510 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59992b65-645e-44c1-9cb4-c3ac1f0bf8da" path="/var/lib/kubelet/pods/59992b65-645e-44c1-9cb4-c3ac1f0bf8da/volumes"
Oct 01 05:57:21 crc kubenswrapper[4661]: I1001 05:57:21.775300 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52ab365-514d-4e8c-b246-717bc8a45c0a" path="/var/lib/kubelet/pods/d52ab365-514d-4e8c-b246-717bc8a45c0a/volumes"
Oct 01 05:57:22 crc kubenswrapper[4661]: I1001 05:57:22.768600 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:57:22 crc kubenswrapper[4661]: E1001 05:57:22.770021 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.040237 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a1c7-account-create-979gq"]
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.059305 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-a7c0-account-create-qc4hg"]
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.072840 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a1c7-account-create-979gq"]
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.082135 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-a7c0-account-create-qc4hg"]
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.779864 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d174eeb-ff97-4090-9f9a-d30f97bd926e" path="/var/lib/kubelet/pods/4d174eeb-ff97-4090-9f9a-d30f97bd926e/volumes"
Oct 01 05:57:27 crc kubenswrapper[4661]: I1001 05:57:27.780585 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e30e23d1-dd87-43e3-975f-38c9c67a63eb" path="/var/lib/kubelet/pods/e30e23d1-dd87-43e3-975f-38c9c67a63eb/volumes"
Oct 01 05:57:34 crc kubenswrapper[4661]: I1001 05:57:34.756506 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:57:34 crc kubenswrapper[4661]: E1001 05:57:34.757365 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:57:48 crc kubenswrapper[4661]: I1001 05:57:48.757833 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:57:48 crc kubenswrapper[4661]: E1001 05:57:48.759042 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:57:50 crc kubenswrapper[4661]: I1001 05:57:50.063130 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xp8s8"]
Oct 01 05:57:50 crc kubenswrapper[4661]: I1001 05:57:50.072954 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xp8s8"]
Oct 01 05:57:51 crc kubenswrapper[4661]: I1001 05:57:51.778128 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="046ddcaa-ecd4-4b69-90ac-c721d4a60fc3" path="/var/lib/kubelet/pods/046ddcaa-ecd4-4b69-90ac-c721d4a60fc3/volumes"
Oct 01 05:57:52 crc kubenswrapper[4661]: I1001 05:57:52.039241 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-6s9q7"]
Oct 01 05:57:52 crc kubenswrapper[4661]: I1001 05:57:52.049424 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-6s9q7"]
Oct 01 05:57:53 crc kubenswrapper[4661]: I1001 05:57:53.775019 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="000282a3-18a6-4ea3-8055-ae4ad12ed82f" path="/var/lib/kubelet/pods/000282a3-18a6-4ea3-8055-ae4ad12ed82f/volumes"
Oct 01 05:58:02 crc kubenswrapper[4661]: I1001 05:58:02.647660 4661 generic.go:334] "Generic (PLEG): container finished" podID="a68882a5-f8f7-40a2-8406-409452df5dc5" containerID="23dbc6be5eb827d4bdc98831c9540292a8fbb067313ddeb909dcf3a7e59fbcfa" exitCode=0
Oct 01 05:58:02 crc kubenswrapper[4661]: I1001 05:58:02.647743 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk" event={"ID":"a68882a5-f8f7-40a2-8406-409452df5dc5","Type":"ContainerDied","Data":"23dbc6be5eb827d4bdc98831c9540292a8fbb067313ddeb909dcf3a7e59fbcfa"}
Oct 01 05:58:03 crc kubenswrapper[4661]: I1001 05:58:03.758556 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:58:03 crc kubenswrapper[4661]: E1001 05:58:03.759507 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.116377 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.204376 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9nf8\" (UniqueName: \"kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8\") pod \"a68882a5-f8f7-40a2-8406-409452df5dc5\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") "
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.204521 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key\") pod \"a68882a5-f8f7-40a2-8406-409452df5dc5\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") "
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.204623 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory\") pod \"a68882a5-f8f7-40a2-8406-409452df5dc5\" (UID: \"a68882a5-f8f7-40a2-8406-409452df5dc5\") "
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.217099 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8" (OuterVolumeSpecName: "kube-api-access-k9nf8") pod "a68882a5-f8f7-40a2-8406-409452df5dc5" (UID: "a68882a5-f8f7-40a2-8406-409452df5dc5"). InnerVolumeSpecName "kube-api-access-k9nf8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.255714 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a68882a5-f8f7-40a2-8406-409452df5dc5" (UID: "a68882a5-f8f7-40a2-8406-409452df5dc5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.259448 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory" (OuterVolumeSpecName: "inventory") pod "a68882a5-f8f7-40a2-8406-409452df5dc5" (UID: "a68882a5-f8f7-40a2-8406-409452df5dc5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.306940 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9nf8\" (UniqueName: \"kubernetes.io/projected/a68882a5-f8f7-40a2-8406-409452df5dc5-kube-api-access-k9nf8\") on node \"crc\" DevicePath \"\""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.306973 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.306987 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a68882a5-f8f7-40a2-8406-409452df5dc5-inventory\") on node \"crc\" DevicePath \"\""
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.680685 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk" event={"ID":"a68882a5-f8f7-40a2-8406-409452df5dc5","Type":"ContainerDied","Data":"3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7"}
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.680749 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fa28ed23b931439b75846c4f916e664f36a118948ee124ca71e688db6b85df7"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.680833 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.793325 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"]
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.793935 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="extract-content"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.793956 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="extract-content"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.793987 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794000 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.794034 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="extract-utilities"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794046 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="extract-utilities"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.794070 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68882a5-f8f7-40a2-8406-409452df5dc5" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794115 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68882a5-f8f7-40a2-8406-409452df5dc5" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.794146 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="extract-content"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794162 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="extract-content"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.794194 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794225 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: E1001 05:58:04.794271 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="extract-utilities"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794286 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="extract-utilities"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794673 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="411367fd-0c10-4b1b-964f-68fa66317635" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794709 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a68882a5-f8f7-40a2-8406-409452df5dc5" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.794765 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1328df6-0e3e-4d3a-a93e-bef8d8608f7f" containerName="registry-server"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.795870 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.799369 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.799386 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.799601 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.799839 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.806569 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"]
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.921286 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.921330 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:04 crc kubenswrapper[4661]: I1001 05:58:04.921375 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqbkz\" (UniqueName: \"kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.023575 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.023652 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.023755 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqbkz\" (UniqueName: \"kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.027998 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.032383 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.045097 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqbkz\" (UniqueName: \"kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9248r\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.118008 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.491597 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r"]
Oct 01 05:58:05 crc kubenswrapper[4661]: I1001 05:58:05.694186 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" event={"ID":"c6c35cbf-45e4-4538-b2de-4dbabd413307","Type":"ContainerStarted","Data":"fde9fb7cb77897899ccbb37714b44fb54eb77784aa20a52f4d9e3efacd7a895e"}
Oct 01 05:58:06 crc kubenswrapper[4661]: I1001 05:58:06.710458 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" event={"ID":"c6c35cbf-45e4-4538-b2de-4dbabd413307","Type":"ContainerStarted","Data":"c9af4cd94e03e472886488e91d85bc88dc4d28b10b3c5df3847f24ec7be2a753"}
Oct 01 05:58:06 crc kubenswrapper[4661]: I1001 05:58:06.739688 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" podStartSLOduration=2.2538042320000002 podStartE2EDuration="2.739662286s" podCreationTimestamp="2025-10-01 05:58:04 +0000 UTC" firstStartedPulling="2025-10-01 05:58:05.463666275 +0000 UTC m=+1734.401644889" lastFinishedPulling="2025-10-01 05:58:05.949524299 +0000 UTC m=+1734.887502943" observedRunningTime="2025-10-01 05:58:06.734282641 +0000 UTC m=+1735.672261285" watchObservedRunningTime="2025-10-01 05:58:06.739662286 +0000 UTC m=+1735.677640930"
Oct 01 05:58:14 crc kubenswrapper[4661]: I1001 05:58:14.050338 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-cwwsl"]
Oct 01 05:58:14 crc kubenswrapper[4661]: I1001 05:58:14.062316 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-cwwsl"]
Oct 01 05:58:14 crc kubenswrapper[4661]: I1001 05:58:14.757112 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:58:14 crc kubenswrapper[4661]: E1001 05:58:14.757526 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 05:58:15 crc kubenswrapper[4661]: I1001 05:58:15.036137 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bcc6q"]
Oct 01 05:58:15 crc kubenswrapper[4661]: I1001 05:58:15.047787 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bcc6q"]
Oct 01 05:58:15 crc kubenswrapper[4661]: I1001 05:58:15.772005 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ae8568a-ecc3-429d-9717-0d05cf2e52d1" path="/var/lib/kubelet/pods/9ae8568a-ecc3-429d-9717-0d05cf2e52d1/volumes"
Oct 01 05:58:15 crc kubenswrapper[4661]: I1001 05:58:15.773612 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8fdca12-5e6d-43d7-ae59-33b3a388ada4" path="/var/lib/kubelet/pods/e8fdca12-5e6d-43d7-ae59-33b3a388ada4/volumes"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.545342 4661 scope.go:117] "RemoveContainer" containerID="4092a45be262474f59a4a32207b8a1c491e5af90c203dcfdd14f817c3b1211fc"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.596005 4661 scope.go:117] "RemoveContainer" containerID="4d30225988229ee0e740e71d9761ffcca9a41d499bc63419bda8d62f9742ae93"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.672401 4661 scope.go:117] "RemoveContainer" containerID="684d7b6c1ff2edf9b2e601cbd480ea164bcf7a129db8d29d1b4bcfa3de60612f"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.715745 4661 scope.go:117] "RemoveContainer" containerID="8de448f1d29f5376eec6fc8283ca1bbad3def19755f762b9e4ba1597dfffe250"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.795434 4661 scope.go:117] "RemoveContainer" containerID="14d34d13c845ef5fc2830f419636aab1ec2428e0e484e69af9db6473be13355a"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.868202 4661 scope.go:117] "RemoveContainer" containerID="8654ad653976aa1653eb601ae70d143f0f5334b9bbd9023a9f8fe748b6b06ae5"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.915118 4661 scope.go:117] "RemoveContainer" containerID="d9745e974c0817c5ce86fd92b9ce15c9d9d04c9b0506500519f1379e4a818036"
Oct 01 05:58:20 crc kubenswrapper[4661]: I1001 05:58:20.936039 4661 scope.go:117] "RemoveContainer" containerID="66c30d91bfe1cf0a2f9433ecca0b7a396c346c95552cd932fc0ceb14b7c30c1d"
Oct 01 05:58:26 crc kubenswrapper[4661]: I1001 05:58:26.758203 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad"
Oct 01 05:58:26 crc kubenswrapper[4661]: E1001 05:58:26.759219 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\""
pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:58:38 crc kubenswrapper[4661]: I1001 05:58:38.063974 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-q5rzr"] Oct 01 05:58:38 crc kubenswrapper[4661]: I1001 05:58:38.085344 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-q5rzr"] Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.038153 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-qt9ck"] Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.047711 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jmdzw"] Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.081793 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-qt9ck"] Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.091482 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-jmdzw"] Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.757618 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:58:39 crc kubenswrapper[4661]: E1001 05:58:39.758254 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.775426 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60845954-9b29-47bd-9080-790730863053" path="/var/lib/kubelet/pods/60845954-9b29-47bd-9080-790730863053/volumes" Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.779448 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67a75668-b682-4fb4-a779-8ff578213667" path="/var/lib/kubelet/pods/67a75668-b682-4fb4-a779-8ff578213667/volumes" Oct 01 05:58:39 crc kubenswrapper[4661]: I1001 05:58:39.781223 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b4455f4-4266-4c94-a56f-5f06005485fa" path="/var/lib/kubelet/pods/9b4455f4-4266-4c94-a56f-5f06005485fa/volumes" Oct 01 05:58:47 crc kubenswrapper[4661]: I1001 05:58:47.045040 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-q2jck"] Oct 01 05:58:47 crc kubenswrapper[4661]: I1001 05:58:47.060334 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-q2jck"] Oct 01 05:58:47 crc kubenswrapper[4661]: I1001 05:58:47.774446 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77022b65-9c8f-4173-957d-0d0e457bd838" path="/var/lib/kubelet/pods/77022b65-9c8f-4173-957d-0d0e457bd838/volumes" Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.039081 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1ba1-account-create-w5rqs"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.052772 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-e3fc-account-create-nj9hp"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.060817 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-4f36-account-create-b6p98"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.066847 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-e3fc-account-create-nj9hp"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.072699 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1ba1-account-create-w5rqs"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.078936 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4f36-account-create-b6p98"] Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.763392 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:58:51 crc kubenswrapper[4661]: E1001 05:58:51.763598 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.783138 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22866a21-7ff0-46cd-8bbe-5a133c012e47" path="/var/lib/kubelet/pods/22866a21-7ff0-46cd-8bbe-5a133c012e47/volumes" Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.783968 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a138f3b-8b69-490d-9e44-67d9b56247f2" path="/var/lib/kubelet/pods/2a138f3b-8b69-490d-9e44-67d9b56247f2/volumes" Oct 01 05:58:51 crc kubenswrapper[4661]: I1001 05:58:51.784746 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0168d5b-fbba-4e5d-9efe-556278ffd191" path="/var/lib/kubelet/pods/c0168d5b-fbba-4e5d-9efe-556278ffd191/volumes" Oct 01 05:59:04 crc kubenswrapper[4661]: I1001 05:59:04.757021 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:59:04 crc kubenswrapper[4661]: E1001 05:59:04.758183 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:59:17 crc kubenswrapper[4661]: I1001 05:59:17.758031 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:59:17 crc kubenswrapper[4661]: E1001 05:59:17.759224 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.168468 4661 scope.go:117] "RemoveContainer" containerID="65668dfb53b2e53a0efbe7ba9d0323e3b0f8dec532ea046c5bc9fb5a15cb1373" Oct 01 05:59:21 crc 
kubenswrapper[4661]: I1001 05:59:21.212720 4661 scope.go:117] "RemoveContainer" containerID="ac21ee7474abda5faf8aa48e1085b7e8c890fd7166e5567fa31333ed4523fe5e" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.261067 4661 scope.go:117] "RemoveContainer" containerID="20354aefd144b91d69a8b8c9f5cd08da669768d8f90a0fccc9e0c6a4ed7958c5" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.305712 4661 scope.go:117] "RemoveContainer" containerID="d04e7d70fbe3ab61b775efa625dbb36d86950d4d4c2ecdb20474c5276554728c" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.341879 4661 scope.go:117] "RemoveContainer" containerID="6352b58cc9e764d94a8c56172765b178676fe88c08c422aa7051b4c057710b88" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.388798 4661 scope.go:117] "RemoveContainer" containerID="88b6d9d89366ef096b51bc9bed89254ab2357e9dd7d7ca98ae93f11cbc11d9b9" Oct 01 05:59:21 crc kubenswrapper[4661]: I1001 05:59:21.440236 4661 scope.go:117] "RemoveContainer" containerID="ab543598b5b7e4b7eaf1bf667901b89b5d375560db221ec6870faa8731e59ed8" Oct 01 05:59:23 crc kubenswrapper[4661]: I1001 05:59:23.627820 4661 generic.go:334] "Generic (PLEG): container finished" podID="c6c35cbf-45e4-4538-b2de-4dbabd413307" containerID="c9af4cd94e03e472886488e91d85bc88dc4d28b10b3c5df3847f24ec7be2a753" exitCode=0 Oct 01 05:59:23 crc kubenswrapper[4661]: I1001 05:59:23.627902 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" event={"ID":"c6c35cbf-45e4-4538-b2de-4dbabd413307","Type":"ContainerDied","Data":"c9af4cd94e03e472886488e91d85bc88dc4d28b10b3c5df3847f24ec7be2a753"} Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.041489 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cqms7"] Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.053329 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cqms7"] Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.117305 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.282645 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory\") pod \"c6c35cbf-45e4-4538-b2de-4dbabd413307\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.282696 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key\") pod \"c6c35cbf-45e4-4538-b2de-4dbabd413307\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.282731 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqbkz\" (UniqueName: \"kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz\") pod \"c6c35cbf-45e4-4538-b2de-4dbabd413307\" (UID: \"c6c35cbf-45e4-4538-b2de-4dbabd413307\") " Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.288855 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz" (OuterVolumeSpecName: "kube-api-access-dqbkz") pod "c6c35cbf-45e4-4538-b2de-4dbabd413307" (UID: "c6c35cbf-45e4-4538-b2de-4dbabd413307"). InnerVolumeSpecName "kube-api-access-dqbkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.313118 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c6c35cbf-45e4-4538-b2de-4dbabd413307" (UID: "c6c35cbf-45e4-4538-b2de-4dbabd413307"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.338515 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory" (OuterVolumeSpecName: "inventory") pod "c6c35cbf-45e4-4538-b2de-4dbabd413307" (UID: "c6c35cbf-45e4-4538-b2de-4dbabd413307"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.385526 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.385562 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6c35cbf-45e4-4538-b2de-4dbabd413307-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.385573 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqbkz\" (UniqueName: \"kubernetes.io/projected/c6c35cbf-45e4-4538-b2de-4dbabd413307-kube-api-access-dqbkz\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.655575 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" event={"ID":"c6c35cbf-45e4-4538-b2de-4dbabd413307","Type":"ContainerDied","Data":"fde9fb7cb77897899ccbb37714b44fb54eb77784aa20a52f4d9e3efacd7a895e"} Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.655651 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fde9fb7cb77897899ccbb37714b44fb54eb77784aa20a52f4d9e3efacd7a895e" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.655737 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9248r" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.777595 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="198fb851-8eef-40d8-9074-997436cc6373" path="/var/lib/kubelet/pods/198fb851-8eef-40d8-9074-997436cc6373/volumes" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.784084 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq"] Oct 01 05:59:25 crc kubenswrapper[4661]: E1001 05:59:25.784916 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c35cbf-45e4-4538-b2de-4dbabd413307" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.784957 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c35cbf-45e4-4538-b2de-4dbabd413307" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.785400 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6c35cbf-45e4-4538-b2de-4dbabd413307" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.786591 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.789066 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.789712 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.789906 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.792576 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.792874 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq"] Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.894792 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66rzx\" (UniqueName: \"kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.895193 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.895421 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.998420 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.998566 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:25 crc kubenswrapper[4661]: I1001 05:59:25.998775 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66rzx\" (UniqueName: \"kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.003341 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.003740 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.029239 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66rzx\" (UniqueName: \"kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.105727 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.546404 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq"] Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.555567 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 05:59:26 crc kubenswrapper[4661]: I1001 05:59:26.664583 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" event={"ID":"5ec720ac-37b8-4dab-9b08-717b48bfae27","Type":"ContainerStarted","Data":"7c02317b8a227b7dfd7416b900783c70cd3f545fe26cc9a74e3c4864e3794af6"} Oct 01 05:59:27 crc kubenswrapper[4661]: I1001 05:59:27.680922 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" event={"ID":"5ec720ac-37b8-4dab-9b08-717b48bfae27","Type":"ContainerStarted","Data":"abe86e6156ed5fe936c2615bd2828638d4f1b252a04ef55c91d2efe2acb725df"} Oct 01 05:59:27 crc kubenswrapper[4661]: I1001 05:59:27.707743 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" podStartSLOduration=2.268139542 podStartE2EDuration="2.707726575s" podCreationTimestamp="2025-10-01 05:59:25 +0000 UTC" firstStartedPulling="2025-10-01 05:59:26.555086006 +0000 UTC m=+1815.493064620" lastFinishedPulling="2025-10-01 05:59:26.994673039 +0000 UTC m=+1815.932651653" observedRunningTime="2025-10-01 05:59:27.701757522 +0000 UTC m=+1816.639736136" watchObservedRunningTime="2025-10-01 05:59:27.707726575 +0000 UTC m=+1816.645705189" Oct 01 05:59:31 crc kubenswrapper[4661]: I1001 05:59:31.777887 4661 scope.go:117] "RemoveContainer" 
containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:59:31 crc kubenswrapper[4661]: E1001 05:59:31.782384 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 05:59:33 crc kubenswrapper[4661]: I1001 05:59:33.750573 4661 generic.go:334] "Generic (PLEG): container finished" podID="5ec720ac-37b8-4dab-9b08-717b48bfae27" containerID="abe86e6156ed5fe936c2615bd2828638d4f1b252a04ef55c91d2efe2acb725df" exitCode=0 Oct 01 05:59:33 crc kubenswrapper[4661]: I1001 05:59:33.750857 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" event={"ID":"5ec720ac-37b8-4dab-9b08-717b48bfae27","Type":"ContainerDied","Data":"abe86e6156ed5fe936c2615bd2828638d4f1b252a04ef55c91d2efe2acb725df"} Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.287995 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.397915 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory\") pod \"5ec720ac-37b8-4dab-9b08-717b48bfae27\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.397982 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66rzx\" (UniqueName: \"kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx\") pod \"5ec720ac-37b8-4dab-9b08-717b48bfae27\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.398105 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key\") pod \"5ec720ac-37b8-4dab-9b08-717b48bfae27\" (UID: \"5ec720ac-37b8-4dab-9b08-717b48bfae27\") " Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.403703 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx" (OuterVolumeSpecName: "kube-api-access-66rzx") pod "5ec720ac-37b8-4dab-9b08-717b48bfae27" (UID: "5ec720ac-37b8-4dab-9b08-717b48bfae27"). InnerVolumeSpecName "kube-api-access-66rzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.425993 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5ec720ac-37b8-4dab-9b08-717b48bfae27" (UID: "5ec720ac-37b8-4dab-9b08-717b48bfae27"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.450451 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory" (OuterVolumeSpecName: "inventory") pod "5ec720ac-37b8-4dab-9b08-717b48bfae27" (UID: "5ec720ac-37b8-4dab-9b08-717b48bfae27"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.501148 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.501194 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66rzx\" (UniqueName: \"kubernetes.io/projected/5ec720ac-37b8-4dab-9b08-717b48bfae27-kube-api-access-66rzx\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.501214 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ec720ac-37b8-4dab-9b08-717b48bfae27-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.776583 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.781945 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq" event={"ID":"5ec720ac-37b8-4dab-9b08-717b48bfae27","Type":"ContainerDied","Data":"7c02317b8a227b7dfd7416b900783c70cd3f545fe26cc9a74e3c4864e3794af6"} Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.782006 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c02317b8a227b7dfd7416b900783c70cd3f545fe26cc9a74e3c4864e3794af6" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.984682 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t"] Oct 01 05:59:35 crc kubenswrapper[4661]: E1001 05:59:35.986293 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec720ac-37b8-4dab-9b08-717b48bfae27" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.986333 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec720ac-37b8-4dab-9b08-717b48bfae27" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.987151 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec720ac-37b8-4dab-9b08-717b48bfae27" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 05:59:35 crc kubenswrapper[4661]: I1001 05:59:35.992552 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.007760 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.008221 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.008313 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.008713 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.045949 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t"] Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.121671 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.121828 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz5r2\" (UniqueName: \"kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.122239 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.224325 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.224956 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.224994 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz5r2\" (UniqueName: \"kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: 
\"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.228949 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.230425 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.246175 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz5r2\" (UniqueName: \"kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p6p8t\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.320578 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 05:59:36 crc kubenswrapper[4661]: I1001 05:59:36.951722 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t"] Oct 01 05:59:36 crc kubenswrapper[4661]: W1001 05:59:36.956837 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9c45dbf_cdfc_49a6_ac9d_49609a690564.slice/crio-86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089 WatchSource:0}: Error finding container 86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089: Status 404 returned error can't find the container with id 86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089 Oct 01 05:59:37 crc kubenswrapper[4661]: I1001 05:59:37.808527 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" event={"ID":"a9c45dbf-cdfc-49a6-ac9d-49609a690564","Type":"ContainerStarted","Data":"f28d59f5966947ef67a50347873bb2e6bc7edac1cd5127237b32c2cfbcde2149"} Oct 01 05:59:37 crc kubenswrapper[4661]: I1001 05:59:37.809011 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" event={"ID":"a9c45dbf-cdfc-49a6-ac9d-49609a690564","Type":"ContainerStarted","Data":"86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089"} Oct 01 05:59:37 crc kubenswrapper[4661]: I1001 05:59:37.832884 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" podStartSLOduration=2.439405989 podStartE2EDuration="2.832862536s" podCreationTimestamp="2025-10-01 05:59:35 +0000 UTC" firstStartedPulling="2025-10-01 05:59:36.958908061 +0000 UTC m=+1825.896886675" lastFinishedPulling="2025-10-01 05:59:37.352364608 +0000 UTC m=+1826.290343222" observedRunningTime="2025-10-01 05:59:37.823026928 +0000 UTC 
m=+1826.761005542" watchObservedRunningTime="2025-10-01 05:59:37.832862536 +0000 UTC m=+1826.770841160" Oct 01 05:59:44 crc kubenswrapper[4661]: I1001 05:59:44.757455 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 05:59:45 crc kubenswrapper[4661]: I1001 05:59:45.901413 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a"} Oct 01 05:59:48 crc kubenswrapper[4661]: I1001 05:59:48.048831 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-csgws"] Oct 01 05:59:48 crc kubenswrapper[4661]: I1001 05:59:48.063568 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-csgws"] Oct 01 05:59:49 crc kubenswrapper[4661]: I1001 05:59:49.774859 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2410d057-e29d-4a70-bc09-c692348e5018" path="/var/lib/kubelet/pods/2410d057-e29d-4a70-bc09-c692348e5018/volumes" Oct 01 05:59:52 crc kubenswrapper[4661]: I1001 05:59:52.048236 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5r72j"] Oct 01 05:59:52 crc kubenswrapper[4661]: I1001 05:59:52.061038 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5r72j"] Oct 01 05:59:53 crc kubenswrapper[4661]: I1001 05:59:53.780916 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf9833ec-089c-4b0c-94b6-f04b7d7773ae" path="/var/lib/kubelet/pods/bf9833ec-089c-4b0c-94b6-f04b7d7773ae/volumes" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.168377 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59"] Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.170567 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.172515 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.172952 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.184357 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59"] Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.233072 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.233302 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.233378 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfw8h\" (UniqueName: \"kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.335207 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.335402 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.335441 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfw8h\" (UniqueName: \"kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.336303 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume\") pod 
\"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.344474 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.356895 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfw8h\" (UniqueName: \"kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h\") pod \"collect-profiles-29321640-4lq59\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:00 crc kubenswrapper[4661]: I1001 06:00:00.505754 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:01 crc kubenswrapper[4661]: I1001 06:00:01.007984 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59"] Oct 01 06:00:01 crc kubenswrapper[4661]: I1001 06:00:01.096232 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" event={"ID":"66df94f3-a98f-4546-912c-0784e65d770d","Type":"ContainerStarted","Data":"a34d8ff753c6d5d8b88a2162c7a9e18fbc5f17c86829861ff2bb4ae9dbf5f599"} Oct 01 06:00:02 crc kubenswrapper[4661]: I1001 06:00:02.107194 4661 generic.go:334] "Generic (PLEG): container finished" podID="66df94f3-a98f-4546-912c-0784e65d770d" containerID="62a60e87fd8ed151c3f68d5758f214ec957850e034067ec3f1be5affeeb7c717" exitCode=0 Oct 01 06:00:02 crc kubenswrapper[4661]: I1001 06:00:02.107308 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" event={"ID":"66df94f3-a98f-4546-912c-0784e65d770d","Type":"ContainerDied","Data":"62a60e87fd8ed151c3f68d5758f214ec957850e034067ec3f1be5affeeb7c717"} Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.554497 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.612245 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume\") pod \"66df94f3-a98f-4546-912c-0784e65d770d\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.612426 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfw8h\" (UniqueName: \"kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h\") pod \"66df94f3-a98f-4546-912c-0784e65d770d\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.613144 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume" (OuterVolumeSpecName: "config-volume") pod "66df94f3-a98f-4546-912c-0784e65d770d" (UID: "66df94f3-a98f-4546-912c-0784e65d770d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.613494 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume\") pod \"66df94f3-a98f-4546-912c-0784e65d770d\" (UID: \"66df94f3-a98f-4546-912c-0784e65d770d\") " Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.613965 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66df94f3-a98f-4546-912c-0784e65d770d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.621888 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h" (OuterVolumeSpecName: "kube-api-access-rfw8h") pod "66df94f3-a98f-4546-912c-0784e65d770d" (UID: "66df94f3-a98f-4546-912c-0784e65d770d"). InnerVolumeSpecName "kube-api-access-rfw8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.622944 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "66df94f3-a98f-4546-912c-0784e65d770d" (UID: "66df94f3-a98f-4546-912c-0784e65d770d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.715525 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfw8h\" (UniqueName: \"kubernetes.io/projected/66df94f3-a98f-4546-912c-0784e65d770d-kube-api-access-rfw8h\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:03 crc kubenswrapper[4661]: I1001 06:00:03.715561 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66df94f3-a98f-4546-912c-0784e65d770d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:04 crc kubenswrapper[4661]: I1001 06:00:04.129172 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" event={"ID":"66df94f3-a98f-4546-912c-0784e65d770d","Type":"ContainerDied","Data":"a34d8ff753c6d5d8b88a2162c7a9e18fbc5f17c86829861ff2bb4ae9dbf5f599"} Oct 01 06:00:04 crc kubenswrapper[4661]: I1001 06:00:04.129238 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a34d8ff753c6d5d8b88a2162c7a9e18fbc5f17c86829861ff2bb4ae9dbf5f599" Oct 01 06:00:04 crc kubenswrapper[4661]: I1001 06:00:04.129257 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59" Oct 01 06:00:21 crc kubenswrapper[4661]: I1001 06:00:21.342228 4661 generic.go:334] "Generic (PLEG): container finished" podID="a9c45dbf-cdfc-49a6-ac9d-49609a690564" containerID="f28d59f5966947ef67a50347873bb2e6bc7edac1cd5127237b32c2cfbcde2149" exitCode=0 Oct 01 06:00:21 crc kubenswrapper[4661]: I1001 06:00:21.342303 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" event={"ID":"a9c45dbf-cdfc-49a6-ac9d-49609a690564","Type":"ContainerDied","Data":"f28d59f5966947ef67a50347873bb2e6bc7edac1cd5127237b32c2cfbcde2149"} Oct 01 06:00:21 crc kubenswrapper[4661]: I1001 06:00:21.659084 4661 scope.go:117] "RemoveContainer" containerID="de75f6f845cb37f06c30281e0964a1e75c66d66508d4747e00fc45a8219876bf" Oct 01 06:00:21 crc kubenswrapper[4661]: I1001 06:00:21.707489 4661 scope.go:117] "RemoveContainer" containerID="9beb3854a87bf01f3aa6b629be3ef358e7047a4f1547a8fb1ac2418072620c06" Oct 01 06:00:21 crc kubenswrapper[4661]: I1001 06:00:21.784425 4661 scope.go:117] "RemoveContainer" containerID="4b0965c75d4b3b8a906510553dbda0fd498f559e06c8b1b188d7dc7be8f56b59" Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.861159 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.941940 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory\") pod \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.942442 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key\") pod \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.942838 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz5r2\" (UniqueName: \"kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2\") pod \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\" (UID: \"a9c45dbf-cdfc-49a6-ac9d-49609a690564\") " Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.952667 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2" (OuterVolumeSpecName: "kube-api-access-jz5r2") pod "a9c45dbf-cdfc-49a6-ac9d-49609a690564" (UID: "a9c45dbf-cdfc-49a6-ac9d-49609a690564"). InnerVolumeSpecName "kube-api-access-jz5r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.979847 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a9c45dbf-cdfc-49a6-ac9d-49609a690564" (UID: "a9c45dbf-cdfc-49a6-ac9d-49609a690564"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:00:22 crc kubenswrapper[4661]: I1001 06:00:22.989795 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory" (OuterVolumeSpecName: "inventory") pod "a9c45dbf-cdfc-49a6-ac9d-49609a690564" (UID: "a9c45dbf-cdfc-49a6-ac9d-49609a690564"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.046487 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz5r2\" (UniqueName: \"kubernetes.io/projected/a9c45dbf-cdfc-49a6-ac9d-49609a690564-kube-api-access-jz5r2\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.046526 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.046539 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9c45dbf-cdfc-49a6-ac9d-49609a690564-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.377843 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.377714 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p6p8t" event={"ID":"a9c45dbf-cdfc-49a6-ac9d-49609a690564","Type":"ContainerDied","Data":"86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089"} Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.378110 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86c6ef15cefc3d41d97484ef5308b4c38f7843433ef5bf5f93df58ed17109089" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.559181 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5"] Oct 01 06:00:23 crc kubenswrapper[4661]: E1001 06:00:23.559673 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66df94f3-a98f-4546-912c-0784e65d770d" containerName="collect-profiles" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.559698 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="66df94f3-a98f-4546-912c-0784e65d770d" containerName="collect-profiles" Oct 01 06:00:23 crc kubenswrapper[4661]: E1001 06:00:23.559712 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c45dbf-cdfc-49a6-ac9d-49609a690564" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.559722 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c45dbf-cdfc-49a6-ac9d-49609a690564" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.559996 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="66df94f3-a98f-4546-912c-0784e65d770d" containerName="collect-profiles" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.560029 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c45dbf-cdfc-49a6-ac9d-49609a690564" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.560924 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.571006 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.571262 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.571488 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.571876 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.577572 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5"] Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.658852 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4nvx\" (UniqueName: \"kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.659016 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.659181 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.761498 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4nvx\" (UniqueName: \"kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.761667 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.761876 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" 
(UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.766694 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.767436 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.787333 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4nvx\" (UniqueName: \"kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:23 crc kubenswrapper[4661]: I1001 06:00:23.880827 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:00:24 crc kubenswrapper[4661]: I1001 06:00:24.481949 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5"] Oct 01 06:00:24 crc kubenswrapper[4661]: W1001 06:00:24.492504 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5db5a1f_67ff_49de_af0f_2ddc5b6dc078.slice/crio-59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373 WatchSource:0}: Error finding container 59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373: Status 404 returned error can't find the container with id 59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373 Oct 01 06:00:25 crc kubenswrapper[4661]: I1001 06:00:25.406057 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" event={"ID":"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078","Type":"ContainerStarted","Data":"b07d3ef96aa768cbd60663a1ed7afd5f792941a82c6215cf9d3a9ceefebe55c3"} Oct 01 06:00:25 crc kubenswrapper[4661]: I1001 06:00:25.406420 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" event={"ID":"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078","Type":"ContainerStarted","Data":"59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373"} Oct 01 06:00:25 crc kubenswrapper[4661]: I1001 06:00:25.433492 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" podStartSLOduration=1.8300910780000001 podStartE2EDuration="2.433464888s" podCreationTimestamp="2025-10-01 06:00:23 +0000 UTC" firstStartedPulling="2025-10-01 06:00:24.495842511 +0000 UTC m=+1873.433821155" lastFinishedPulling="2025-10-01 06:00:25.099216321 +0000 UTC m=+1874.037194965" observedRunningTime="2025-10-01 
06:00:25.427403153 +0000 UTC m=+1874.365381807" watchObservedRunningTime="2025-10-01 06:00:25.433464888 +0000 UTC m=+1874.371443532" Oct 01 06:00:33 crc kubenswrapper[4661]: I1001 06:00:33.077477 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-mfflc"] Oct 01 06:00:33 crc kubenswrapper[4661]: I1001 06:00:33.098301 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-mfflc"] Oct 01 06:00:33 crc kubenswrapper[4661]: I1001 06:00:33.772925 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a9e6b8f-69ef-4dc3-87ae-132c47989184" path="/var/lib/kubelet/pods/7a9e6b8f-69ef-4dc3-87ae-132c47989184/volumes" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.174780 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29321641-pqtk6"] Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.178622 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.187438 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321641-pqtk6"] Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.262625 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.262758 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqngj\" (UniqueName: \"kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.262845 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.263283 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.366756 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.366908 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqngj\" (UniqueName: \"kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj\") pod \"keystone-cron-29321641-pqtk6\" (UID: 
\"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.367090 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.368389 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.388046 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.388231 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.388245 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.401925 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqngj\" (UniqueName: \"kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj\") pod \"keystone-cron-29321641-pqtk6\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:00 crc kubenswrapper[4661]: I1001 06:01:00.512728 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:01 crc kubenswrapper[4661]: I1001 06:01:01.005263 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321641-pqtk6"] Oct 01 06:01:01 crc kubenswrapper[4661]: I1001 06:01:01.919086 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321641-pqtk6" event={"ID":"bbe73ddd-f327-4f4b-ba2d-d647bae84361","Type":"ContainerStarted","Data":"f0368918a7d369d2e035509825900ddf1c2ee7f3feac3094e18aca669be3eb35"} Oct 01 06:01:01 crc kubenswrapper[4661]: I1001 06:01:01.919676 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321641-pqtk6" event={"ID":"bbe73ddd-f327-4f4b-ba2d-d647bae84361","Type":"ContainerStarted","Data":"16080199c7ea69b2c916fd34ce4dd39950140c53d511de10c3c191279b790932"} Oct 01 06:01:01 crc kubenswrapper[4661]: I1001 06:01:01.957703 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29321641-pqtk6" podStartSLOduration=1.9576742660000002 podStartE2EDuration="1.957674266s" podCreationTimestamp="2025-10-01 06:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:01:01.943998345 +0000 UTC m=+1910.881976989" watchObservedRunningTime="2025-10-01 06:01:01.957674266 +0000 UTC m=+1910.895652930" Oct 01 06:01:03 crc kubenswrapper[4661]: I1001 06:01:03.943073 4661 generic.go:334] "Generic (PLEG): container finished" podID="bbe73ddd-f327-4f4b-ba2d-d647bae84361" containerID="f0368918a7d369d2e035509825900ddf1c2ee7f3feac3094e18aca669be3eb35" exitCode=0 Oct 01 06:01:03 crc kubenswrapper[4661]: I1001 06:01:03.943307 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321641-pqtk6" event={"ID":"bbe73ddd-f327-4f4b-ba2d-d647bae84361","Type":"ContainerDied","Data":"f0368918a7d369d2e035509825900ddf1c2ee7f3feac3094e18aca669be3eb35"} Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.389102 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.488159 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data\") pod \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.488363 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqngj\" (UniqueName: \"kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj\") pod \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.488551 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle\") pod \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.488610 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys\") pod \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\" (UID: \"bbe73ddd-f327-4f4b-ba2d-d647bae84361\") " Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.494210 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bbe73ddd-f327-4f4b-ba2d-d647bae84361" (UID: "bbe73ddd-f327-4f4b-ba2d-d647bae84361"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.494970 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj" (OuterVolumeSpecName: "kube-api-access-lqngj") pod "bbe73ddd-f327-4f4b-ba2d-d647bae84361" (UID: "bbe73ddd-f327-4f4b-ba2d-d647bae84361"). InnerVolumeSpecName "kube-api-access-lqngj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.524364 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbe73ddd-f327-4f4b-ba2d-d647bae84361" (UID: "bbe73ddd-f327-4f4b-ba2d-d647bae84361"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.546654 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data" (OuterVolumeSpecName: "config-data") pod "bbe73ddd-f327-4f4b-ba2d-d647bae84361" (UID: "bbe73ddd-f327-4f4b-ba2d-d647bae84361"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.589983 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.590027 4661 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.590042 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbe73ddd-f327-4f4b-ba2d-d647bae84361-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.590054 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqngj\" (UniqueName: \"kubernetes.io/projected/bbe73ddd-f327-4f4b-ba2d-d647bae84361-kube-api-access-lqngj\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.971820 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321641-pqtk6" event={"ID":"bbe73ddd-f327-4f4b-ba2d-d647bae84361","Type":"ContainerDied","Data":"16080199c7ea69b2c916fd34ce4dd39950140c53d511de10c3c191279b790932"} Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.971869 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16080199c7ea69b2c916fd34ce4dd39950140c53d511de10c3c191279b790932" Oct 01 06:01:05 crc kubenswrapper[4661]: I1001 06:01:05.971884 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321641-pqtk6" Oct 01 06:01:21 crc kubenswrapper[4661]: I1001 06:01:21.963025 4661 scope.go:117] "RemoveContainer" containerID="77fffd9e2758ac2d682816aff53c9b15f686aabe00f81898542207df802d3507" Oct 01 06:01:26 crc kubenswrapper[4661]: I1001 06:01:26.213728 4661 generic.go:334] "Generic (PLEG): container finished" podID="d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" containerID="b07d3ef96aa768cbd60663a1ed7afd5f792941a82c6215cf9d3a9ceefebe55c3" exitCode=2 Oct 01 06:01:26 crc kubenswrapper[4661]: I1001 06:01:26.213844 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" event={"ID":"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078","Type":"ContainerDied","Data":"b07d3ef96aa768cbd60663a1ed7afd5f792941a82c6215cf9d3a9ceefebe55c3"} Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.805576 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.896874 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key\") pod \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.896956 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory\") pod \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.897110 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4nvx\" (UniqueName: \"kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx\") pod \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\" (UID: \"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078\") " Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.907807 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx" (OuterVolumeSpecName: "kube-api-access-t4nvx") pod "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" (UID: "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078"). InnerVolumeSpecName "kube-api-access-t4nvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.950250 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory" (OuterVolumeSpecName: "inventory") pod "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" (UID: "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.952792 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" (UID: "d5db5a1f-67ff-49de-af0f-2ddc5b6dc078"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.999586 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.999643 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:27 crc kubenswrapper[4661]: I1001 06:01:27.999658 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4nvx\" (UniqueName: \"kubernetes.io/projected/d5db5a1f-67ff-49de-af0f-2ddc5b6dc078-kube-api-access-t4nvx\") on node \"crc\" DevicePath \"\"" Oct 01 06:01:28 crc kubenswrapper[4661]: I1001 06:01:28.237970 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" Oct 01 06:01:28 crc kubenswrapper[4661]: I1001 06:01:28.237959 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5" event={"ID":"d5db5a1f-67ff-49de-af0f-2ddc5b6dc078","Type":"ContainerDied","Data":"59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373"} Oct 01 06:01:28 crc kubenswrapper[4661]: I1001 06:01:28.238821 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59f8b17ce7a316a9c8a9317778bb61f8d20ee7a34e58845318dedfa7f1709373" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.030468 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t"] Oct 01 06:01:35 crc kubenswrapper[4661]: E1001 06:01:35.031409 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbe73ddd-f327-4f4b-ba2d-d647bae84361" containerName="keystone-cron" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.031428 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbe73ddd-f327-4f4b-ba2d-d647bae84361" containerName="keystone-cron" Oct 01 06:01:35 crc kubenswrapper[4661]: E1001 06:01:35.031480 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.031488 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.031685 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5db5a1f-67ff-49de-af0f-2ddc5b6dc078" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.031706 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbe73ddd-f327-4f4b-ba2d-d647bae84361" containerName="keystone-cron" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.032347 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.035854 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.035861 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.040206 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.043192 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.051816 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t"] Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.160487 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.160711 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqb82\" (UniqueName: \"kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.160848 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.263385 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.263494 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqb82\" (UniqueName: \"kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.263583 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" 
(UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.272761 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.273933 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.286933 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqb82\" (UniqueName: \"kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.356551 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:01:35 crc kubenswrapper[4661]: I1001 06:01:35.991169 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t"] Oct 01 06:01:36 crc kubenswrapper[4661]: I1001 06:01:36.335466 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" event={"ID":"cbe5a564-344a-449a-a457-61e5002621a6","Type":"ContainerStarted","Data":"baa0e29e85f1ef42cee1a52aa9dd4aa4c6c4eed9ced4001fb260db499cb6e9e4"} Oct 01 06:01:37 crc kubenswrapper[4661]: I1001 06:01:37.350452 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" event={"ID":"cbe5a564-344a-449a-a457-61e5002621a6","Type":"ContainerStarted","Data":"de8bf22324352e6ef5c1c937f9fcc62a7936bd63d82080ca14f6a280ce456084"} Oct 01 06:01:37 crc kubenswrapper[4661]: I1001 06:01:37.379404 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" podStartSLOduration=1.9169244399999998 podStartE2EDuration="2.379372467s" podCreationTimestamp="2025-10-01 06:01:35 +0000 UTC" firstStartedPulling="2025-10-01 06:01:36.002289155 +0000 UTC m=+1944.940267809" lastFinishedPulling="2025-10-01 06:01:36.464737182 +0000 UTC m=+1945.402715836" observedRunningTime="2025-10-01 06:01:37.366049355 +0000 UTC m=+1946.304028009" watchObservedRunningTime="2025-10-01 06:01:37.379372467 +0000 UTC m=+1946.317351111" Oct 01 06:02:04 crc kubenswrapper[4661]: I1001 06:02:04.309847 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:02:04 crc kubenswrapper[4661]: I1001 
06:02:04.310615 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:02:22 crc kubenswrapper[4661]: I1001 06:02:22.085333 4661 scope.go:117] "RemoveContainer" containerID="b957c2823f07393ea32986fed554212c5466d6562b1ca7246a977c4f126d363a" Oct 01 06:02:22 crc kubenswrapper[4661]: I1001 06:02:22.114128 4661 scope.go:117] "RemoveContainer" containerID="1fea4348847ed8a0b64783f0d97fc70ecfb203a566eb11e3037be5d04e915b0a" Oct 01 06:02:22 crc kubenswrapper[4661]: I1001 06:02:22.168544 4661 scope.go:117] "RemoveContainer" containerID="ab2ac3596b8d43b0579f21feadc16014c61bbdbf75c8fdd4d93591707f85e70b" Oct 01 06:02:32 crc kubenswrapper[4661]: I1001 06:02:32.010268 4661 generic.go:334] "Generic (PLEG): container finished" podID="cbe5a564-344a-449a-a457-61e5002621a6" containerID="de8bf22324352e6ef5c1c937f9fcc62a7936bd63d82080ca14f6a280ce456084" exitCode=0 Oct 01 06:02:32 crc kubenswrapper[4661]: I1001 06:02:32.010397 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" event={"ID":"cbe5a564-344a-449a-a457-61e5002621a6","Type":"ContainerDied","Data":"de8bf22324352e6ef5c1c937f9fcc62a7936bd63d82080ca14f6a280ce456084"} Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.547936 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.552396 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory\") pod \"cbe5a564-344a-449a-a457-61e5002621a6\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.552459 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqb82\" (UniqueName: \"kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82\") pod \"cbe5a564-344a-449a-a457-61e5002621a6\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.552559 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key\") pod \"cbe5a564-344a-449a-a457-61e5002621a6\" (UID: \"cbe5a564-344a-449a-a457-61e5002621a6\") " Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.557620 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82" (OuterVolumeSpecName: "kube-api-access-tqb82") pod "cbe5a564-344a-449a-a457-61e5002621a6" (UID: "cbe5a564-344a-449a-a457-61e5002621a6"). InnerVolumeSpecName "kube-api-access-tqb82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.585652 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cbe5a564-344a-449a-a457-61e5002621a6" (UID: "cbe5a564-344a-449a-a457-61e5002621a6"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.591939 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory" (OuterVolumeSpecName: "inventory") pod "cbe5a564-344a-449a-a457-61e5002621a6" (UID: "cbe5a564-344a-449a-a457-61e5002621a6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.654272 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.654305 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbe5a564-344a-449a-a457-61e5002621a6-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:33 crc kubenswrapper[4661]: I1001 06:02:33.654315 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqb82\" (UniqueName: \"kubernetes.io/projected/cbe5a564-344a-449a-a457-61e5002621a6-kube-api-access-tqb82\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.036771 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.036613 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t" event={"ID":"cbe5a564-344a-449a-a457-61e5002621a6","Type":"ContainerDied","Data":"baa0e29e85f1ef42cee1a52aa9dd4aa4c6c4eed9ced4001fb260db499cb6e9e4"} Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.036851 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baa0e29e85f1ef42cee1a52aa9dd4aa4c6c4eed9ced4001fb260db499cb6e9e4" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.143054 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fzw95"] Oct 01 06:02:34 crc kubenswrapper[4661]: E1001 06:02:34.143603 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe5a564-344a-449a-a457-61e5002621a6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.143643 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe5a564-344a-449a-a457-61e5002621a6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.143909 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe5a564-344a-449a-a457-61e5002621a6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.144749 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.148180 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.148476 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.148702 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.149006 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.159485 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fzw95"] Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.166961 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.167285 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.167423 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwvgd\" (UniqueName: \"kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.268401 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.269039 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.269196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwvgd\" (UniqueName: \"kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc 
kubenswrapper[4661]: I1001 06:02:34.272660 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.272808 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.295189 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwvgd\" (UniqueName: \"kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd\") pod \"ssh-known-hosts-edpm-deployment-fzw95\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.309173 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.309255 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:02:34 crc kubenswrapper[4661]: I1001 06:02:34.461831 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:35 crc kubenswrapper[4661]: I1001 06:02:35.038746 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-fzw95"] Oct 01 06:02:35 crc kubenswrapper[4661]: W1001 06:02:35.046339 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d367bb3_134b_460e_8cff_75aad6a88043.slice/crio-c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e WatchSource:0}: Error finding container c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e: Status 404 returned error can't find the container with id c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e Oct 01 06:02:36 crc kubenswrapper[4661]: I1001 06:02:36.060209 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" event={"ID":"7d367bb3-134b-460e-8cff-75aad6a88043","Type":"ContainerStarted","Data":"36d5ef507897ec23789d5133a0426e34d56ac4376a62da277593998d0086e6ba"} Oct 01 06:02:36 crc kubenswrapper[4661]: I1001 06:02:36.060575 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" event={"ID":"7d367bb3-134b-460e-8cff-75aad6a88043","Type":"ContainerStarted","Data":"c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e"} Oct 01 06:02:36 crc kubenswrapper[4661]: I1001 06:02:36.089013 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" podStartSLOduration=1.588641795 podStartE2EDuration="2.08898577s" podCreationTimestamp="2025-10-01 06:02:34 +0000 UTC" firstStartedPulling="2025-10-01 06:02:35.04856778 +0000 UTC m=+2003.986546404" lastFinishedPulling="2025-10-01 06:02:35.548911765 +0000 UTC m=+2004.486890379" observedRunningTime="2025-10-01 06:02:36.077735274 +0000 UTC m=+2005.015713918" watchObservedRunningTime="2025-10-01 06:02:36.08898577 +0000 UTC m=+2005.026964414" Oct 01 06:02:44 crc kubenswrapper[4661]: I1001 06:02:44.149297 4661 generic.go:334] "Generic (PLEG): container finished" podID="7d367bb3-134b-460e-8cff-75aad6a88043" containerID="36d5ef507897ec23789d5133a0426e34d56ac4376a62da277593998d0086e6ba" exitCode=0 Oct 01 06:02:44 crc kubenswrapper[4661]: I1001 06:02:44.149366 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" event={"ID":"7d367bb3-134b-460e-8cff-75aad6a88043","Type":"ContainerDied","Data":"36d5ef507897ec23789d5133a0426e34d56ac4376a62da277593998d0086e6ba"} Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.634951 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.809751 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0\") pod \"7d367bb3-134b-460e-8cff-75aad6a88043\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.810067 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") pod \"7d367bb3-134b-460e-8cff-75aad6a88043\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.810160 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwvgd\" (UniqueName: \"kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd\") pod \"7d367bb3-134b-460e-8cff-75aad6a88043\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.817333 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd" (OuterVolumeSpecName: "kube-api-access-xwvgd") pod "7d367bb3-134b-460e-8cff-75aad6a88043" (UID: "7d367bb3-134b-460e-8cff-75aad6a88043"). InnerVolumeSpecName "kube-api-access-xwvgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:02:45 crc kubenswrapper[4661]: E1001 06:02:45.869712 4661 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam podName:7d367bb3-134b-460e-8cff-75aad6a88043 nodeName:}" failed. No retries permitted until 2025-10-01 06:02:46.369681421 +0000 UTC m=+2015.307660055 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-openstack-edpm-ipam" (UniqueName: "kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam") pod "7d367bb3-134b-460e-8cff-75aad6a88043" (UID: "7d367bb3-134b-460e-8cff-75aad6a88043") : error deleting /var/lib/kubelet/pods/7d367bb3-134b-460e-8cff-75aad6a88043/volume-subpaths: remove /var/lib/kubelet/pods/7d367bb3-134b-460e-8cff-75aad6a88043/volume-subpaths: no such file or directory Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.873944 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "7d367bb3-134b-460e-8cff-75aad6a88043" (UID: "7d367bb3-134b-460e-8cff-75aad6a88043"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.913972 4661 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:45 crc kubenswrapper[4661]: I1001 06:02:45.914042 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwvgd\" (UniqueName: \"kubernetes.io/projected/7d367bb3-134b-460e-8cff-75aad6a88043-kube-api-access-xwvgd\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.175049 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" event={"ID":"7d367bb3-134b-460e-8cff-75aad6a88043","Type":"ContainerDied","Data":"c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e"} Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.175088 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c22be18a665ac728679967f14ed843920725131ebbfbcf9b3ba060cc11e7ee0e" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.175168 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-fzw95" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.275800 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d"] Oct 01 06:02:46 crc kubenswrapper[4661]: E1001 06:02:46.276430 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d367bb3-134b-460e-8cff-75aad6a88043" containerName="ssh-known-hosts-edpm-deployment" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.276495 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d367bb3-134b-460e-8cff-75aad6a88043" containerName="ssh-known-hosts-edpm-deployment" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.276798 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d367bb3-134b-460e-8cff-75aad6a88043" containerName="ssh-known-hosts-edpm-deployment" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.277490 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.308502 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d"] Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.425341 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") pod \"7d367bb3-134b-460e-8cff-75aad6a88043\" (UID: \"7d367bb3-134b-460e-8cff-75aad6a88043\") " Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.426078 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.426191 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cmdw\" (UniqueName: \"kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.426341 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.429088 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7d367bb3-134b-460e-8cff-75aad6a88043" (UID: "7d367bb3-134b-460e-8cff-75aad6a88043"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.527917 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.527972 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cmdw\" (UniqueName: \"kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.528064 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.528121 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d367bb3-134b-460e-8cff-75aad6a88043-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.532047 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.533122 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.546902 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cmdw\" (UniqueName: \"kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwf7d\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:46 crc kubenswrapper[4661]: I1001 06:02:46.599549 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:47 crc kubenswrapper[4661]: I1001 06:02:47.194084 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d"] Oct 01 06:02:48 crc kubenswrapper[4661]: I1001 06:02:48.216435 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" event={"ID":"daed5adf-0ad7-4236-abc5-fcd5053645b7","Type":"ContainerStarted","Data":"7389908c99997933d4d548c31446e7460469e5316248532ca6b3bb44b1ef0fd5"} Oct 01 06:02:48 crc kubenswrapper[4661]: I1001 06:02:48.216932 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" event={"ID":"daed5adf-0ad7-4236-abc5-fcd5053645b7","Type":"ContainerStarted","Data":"e61b41ce85341444f3959a6cde8212c715679363d22e3c718eae4bb1056a5894"} Oct 01 06:02:48 crc kubenswrapper[4661]: I1001 06:02:48.238756 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" podStartSLOduration=1.739495798 podStartE2EDuration="2.238732296s" podCreationTimestamp="2025-10-01 06:02:46 +0000 UTC" firstStartedPulling="2025-10-01 06:02:47.208286412 +0000 UTC m=+2016.146265066" lastFinishedPulling="2025-10-01 06:02:47.70752291 +0000 UTC m=+2016.645501564" observedRunningTime="2025-10-01 06:02:48.230992845 +0000 UTC m=+2017.168971469" watchObservedRunningTime="2025-10-01 06:02:48.238732296 +0000 UTC m=+2017.176710930" Oct 01 06:02:58 crc kubenswrapper[4661]: I1001 06:02:58.328214 4661 generic.go:334] "Generic (PLEG): container finished" podID="daed5adf-0ad7-4236-abc5-fcd5053645b7" containerID="7389908c99997933d4d548c31446e7460469e5316248532ca6b3bb44b1ef0fd5" exitCode=0 Oct 01 06:02:58 crc kubenswrapper[4661]: I1001 06:02:58.328280 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" event={"ID":"daed5adf-0ad7-4236-abc5-fcd5053645b7","Type":"ContainerDied","Data":"7389908c99997933d4d548c31446e7460469e5316248532ca6b3bb44b1ef0fd5"} Oct 01 06:02:59 crc kubenswrapper[4661]: I1001 06:02:59.882222 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:02:59 crc kubenswrapper[4661]: I1001 06:02:59.995475 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key\") pod \"daed5adf-0ad7-4236-abc5-fcd5053645b7\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " Oct 01 06:02:59 crc kubenswrapper[4661]: I1001 06:02:59.995733 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cmdw\" (UniqueName: \"kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw\") pod \"daed5adf-0ad7-4236-abc5-fcd5053645b7\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " Oct 01 06:02:59 crc kubenswrapper[4661]: I1001 06:02:59.995795 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory\") pod \"daed5adf-0ad7-4236-abc5-fcd5053645b7\" (UID: \"daed5adf-0ad7-4236-abc5-fcd5053645b7\") " Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.003933 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw" (OuterVolumeSpecName: "kube-api-access-6cmdw") pod "daed5adf-0ad7-4236-abc5-fcd5053645b7" (UID: "daed5adf-0ad7-4236-abc5-fcd5053645b7"). InnerVolumeSpecName "kube-api-access-6cmdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.044042 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory" (OuterVolumeSpecName: "inventory") pod "daed5adf-0ad7-4236-abc5-fcd5053645b7" (UID: "daed5adf-0ad7-4236-abc5-fcd5053645b7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.046132 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "daed5adf-0ad7-4236-abc5-fcd5053645b7" (UID: "daed5adf-0ad7-4236-abc5-fcd5053645b7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.098321 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.098361 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cmdw\" (UniqueName: \"kubernetes.io/projected/daed5adf-0ad7-4236-abc5-fcd5053645b7-kube-api-access-6cmdw\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.098376 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/daed5adf-0ad7-4236-abc5-fcd5053645b7-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.354423 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" event={"ID":"daed5adf-0ad7-4236-abc5-fcd5053645b7","Type":"ContainerDied","Data":"e61b41ce85341444f3959a6cde8212c715679363d22e3c718eae4bb1056a5894"} Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.354477 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e61b41ce85341444f3959a6cde8212c715679363d22e3c718eae4bb1056a5894" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.354475 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwf7d" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.453934 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv"] Oct 01 06:03:00 crc kubenswrapper[4661]: E1001 06:03:00.454717 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daed5adf-0ad7-4236-abc5-fcd5053645b7" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.454760 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="daed5adf-0ad7-4236-abc5-fcd5053645b7" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.455181 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="daed5adf-0ad7-4236-abc5-fcd5053645b7" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.456230 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.458756 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.458908 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.459013 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.462948 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.467623 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv"] Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.606323 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk8rm\" (UniqueName: \"kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.606483 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.606535 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.707829 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk8rm\" (UniqueName: \"kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.708287 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.708376 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: 
\"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.712054 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.713015 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.723489 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk8rm\" (UniqueName: \"kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:00 crc kubenswrapper[4661]: I1001 06:03:00.787503 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:01 crc kubenswrapper[4661]: I1001 06:03:01.343287 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv"] Oct 01 06:03:01 crc kubenswrapper[4661]: I1001 06:03:01.368952 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" event={"ID":"c9202962-2893-458d-996c-2890fa302029","Type":"ContainerStarted","Data":"bede2ab5effbe93da0324cb9458d90b2d50fd3e91f85909460083e831abd343b"} Oct 01 06:03:02 crc kubenswrapper[4661]: I1001 06:03:02.391017 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" event={"ID":"c9202962-2893-458d-996c-2890fa302029","Type":"ContainerStarted","Data":"423dfbad8c279fd9da7addf35c3cf3bd78331636f1fcbf2e849faf7e26945789"} Oct 01 06:03:02 crc kubenswrapper[4661]: I1001 06:03:02.431894 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" podStartSLOduration=1.946842593 podStartE2EDuration="2.431864076s" podCreationTimestamp="2025-10-01 06:03:00 +0000 UTC" firstStartedPulling="2025-10-01 06:03:01.347540581 +0000 UTC m=+2030.285519225" lastFinishedPulling="2025-10-01 06:03:01.832562054 +0000 UTC m=+2030.770540708" observedRunningTime="2025-10-01 06:03:02.415493762 +0000 UTC m=+2031.353472446" watchObservedRunningTime="2025-10-01 06:03:02.431864076 +0000 UTC m=+2031.369842730" Oct 01 06:03:04 crc kubenswrapper[4661]: I1001 06:03:04.309409 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:03:04 crc kubenswrapper[4661]: I1001 06:03:04.310087 4661 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:03:04 crc kubenswrapper[4661]: I1001 06:03:04.310163 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:03:04 crc kubenswrapper[4661]: I1001 06:03:04.311192 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:03:04 crc kubenswrapper[4661]: I1001 06:03:04.311309 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a" gracePeriod=600 Oct 01 06:03:05 crc kubenswrapper[4661]: I1001 06:03:05.452985 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a" exitCode=0 Oct 01 06:03:05 crc kubenswrapper[4661]: I1001 06:03:05.453135 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a"} Oct 01 06:03:05 crc kubenswrapper[4661]: I1001 06:03:05.454934 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a"} Oct 01 06:03:05 crc kubenswrapper[4661]: I1001 06:03:05.454992 4661 scope.go:117] "RemoveContainer" containerID="d940ccddb6db6ce9092db3f50d84fd0ae5f1836a68dc8fb7cee216eefb0483ad" Oct 01 06:03:12 crc kubenswrapper[4661]: I1001 06:03:12.546991 4661 generic.go:334] "Generic (PLEG): container finished" podID="c9202962-2893-458d-996c-2890fa302029" containerID="423dfbad8c279fd9da7addf35c3cf3bd78331636f1fcbf2e849faf7e26945789" exitCode=0 Oct 01 06:03:12 crc kubenswrapper[4661]: I1001 06:03:12.547215 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" event={"ID":"c9202962-2893-458d-996c-2890fa302029","Type":"ContainerDied","Data":"423dfbad8c279fd9da7addf35c3cf3bd78331636f1fcbf2e849faf7e26945789"} Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.037250 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.214539 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory\") pod \"c9202962-2893-458d-996c-2890fa302029\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.214670 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key\") pod \"c9202962-2893-458d-996c-2890fa302029\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.214844 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk8rm\" (UniqueName: \"kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm\") pod \"c9202962-2893-458d-996c-2890fa302029\" (UID: \"c9202962-2893-458d-996c-2890fa302029\") " Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.230833 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm" (OuterVolumeSpecName: "kube-api-access-vk8rm") pod "c9202962-2893-458d-996c-2890fa302029" (UID: "c9202962-2893-458d-996c-2890fa302029"). InnerVolumeSpecName "kube-api-access-vk8rm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.247088 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory" (OuterVolumeSpecName: "inventory") pod "c9202962-2893-458d-996c-2890fa302029" (UID: "c9202962-2893-458d-996c-2890fa302029"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.261173 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c9202962-2893-458d-996c-2890fa302029" (UID: "c9202962-2893-458d-996c-2890fa302029"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.317541 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk8rm\" (UniqueName: \"kubernetes.io/projected/c9202962-2893-458d-996c-2890fa302029-kube-api-access-vk8rm\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.317576 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.317585 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9202962-2893-458d-996c-2890fa302029-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.572813 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" event={"ID":"c9202962-2893-458d-996c-2890fa302029","Type":"ContainerDied","Data":"bede2ab5effbe93da0324cb9458d90b2d50fd3e91f85909460083e831abd343b"} Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.572890 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bede2ab5effbe93da0324cb9458d90b2d50fd3e91f85909460083e831abd343b" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.572924 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.694102 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst"] Oct 01 06:03:14 crc kubenswrapper[4661]: E1001 06:03:14.694607 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9202962-2893-458d-996c-2890fa302029" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.694650 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9202962-2893-458d-996c-2890fa302029" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.694933 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9202962-2893-458d-996c-2890fa302029" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.695734 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.700784 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.701465 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.701705 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.701821 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.701873 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.701896 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.702339 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.706112 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.720742 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst"] Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.836720 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837175 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837276 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837408 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837442 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837491 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837539 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837611 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837689 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flw5k\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837719 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837744 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837807 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837842 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.837897 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940455 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940512 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940565 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940608 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940658 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940694 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940726 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940785 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flw5k\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940803 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940833 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940858 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940882 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.940909 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.945327 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.946427 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.946989 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.947098 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.947178 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.948113 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.948392 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.948840 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.949167 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.950689 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.953989 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.954334 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.958347 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:14 crc kubenswrapper[4661]: I1001 06:03:14.969466 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flw5k\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-nvtst\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:15 crc kubenswrapper[4661]: I1001 06:03:15.019358 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:03:15 crc kubenswrapper[4661]: I1001 06:03:15.578831 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst"] Oct 01 06:03:15 crc kubenswrapper[4661]: W1001 06:03:15.581883 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b1a4183_d18a_4f41_b62a_12f52370c46e.slice/crio-906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a WatchSource:0}: Error finding container 906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a: Status 404 returned error can't find the container with id 906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a Oct 01 06:03:16 crc kubenswrapper[4661]: I1001 06:03:16.597253 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" event={"ID":"3b1a4183-d18a-4f41-b62a-12f52370c46e","Type":"ContainerStarted","Data":"23785482e99f595457ab4f4622bded95855df235348159b3ebca10a2a9568ba1"} Oct 01 06:03:16 crc kubenswrapper[4661]: I1001 06:03:16.597744 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" event={"ID":"3b1a4183-d18a-4f41-b62a-12f52370c46e","Type":"ContainerStarted","Data":"906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a"} Oct 01 06:03:16 crc kubenswrapper[4661]: I1001 06:03:16.626409 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" podStartSLOduration=1.99424164 podStartE2EDuration="2.626386772s" podCreationTimestamp="2025-10-01 06:03:14 +0000 UTC" firstStartedPulling="2025-10-01 06:03:15.585009252 +0000 UTC m=+2044.522987876" lastFinishedPulling="2025-10-01 06:03:16.217154364 +0000 UTC m=+2045.155133008" observedRunningTime="2025-10-01 06:03:16.620961955 +0000 UTC m=+2045.558940569" watchObservedRunningTime="2025-10-01 06:03:16.626386772 +0000 UTC m=+2045.564365386" Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.782932 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.785970 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.800523 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.923649 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.923986 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:25 crc kubenswrapper[4661]: I1001 06:03:25.924146 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hmnv\" (UniqueName: \"kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.025855 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hmnv\" (UniqueName: \"kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.025992 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.026015 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.026451 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.026493 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.047113 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7hmnv\" (UniqueName: \"kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv\") pod \"community-operators-88xwg\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.127832 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.648255 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:26 crc kubenswrapper[4661]: I1001 06:03:26.709781 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerStarted","Data":"34012f5837078ebe6cc5f1b4dfb90c7fee4c2ec6e480829bb2ab7cef8c3bb40d"} Oct 01 06:03:27 crc kubenswrapper[4661]: I1001 06:03:27.718178 4661 generic.go:334] "Generic (PLEG): container finished" podID="b2815fdd-68a3-45a6-8712-9b2177641014" containerID="4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5" exitCode=0 Oct 01 06:03:27 crc kubenswrapper[4661]: I1001 06:03:27.718274 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerDied","Data":"4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5"} Oct 01 06:03:28 crc kubenswrapper[4661]: I1001 06:03:28.732503 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerStarted","Data":"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e"} Oct 01 06:03:29 crc kubenswrapper[4661]: I1001 06:03:29.745502 4661 generic.go:334] "Generic (PLEG): container finished" podID="b2815fdd-68a3-45a6-8712-9b2177641014" containerID="a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e" exitCode=0 Oct 01 06:03:29 crc kubenswrapper[4661]: I1001 06:03:29.745553 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerDied","Data":"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e"} Oct 01 06:03:30 crc kubenswrapper[4661]: I1001 06:03:30.758479 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerStarted","Data":"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0"} Oct 01 06:03:30 crc kubenswrapper[4661]: I1001 06:03:30.788753 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-88xwg" podStartSLOduration=3.326342482 podStartE2EDuration="5.788725226s" podCreationTimestamp="2025-10-01 06:03:25 +0000 UTC" firstStartedPulling="2025-10-01 06:03:27.721047037 +0000 UTC m=+2056.659025651" lastFinishedPulling="2025-10-01 06:03:30.183429771 +0000 UTC m=+2059.121408395" observedRunningTime="2025-10-01 06:03:30.778793896 +0000 UTC m=+2059.716772550" watchObservedRunningTime="2025-10-01 06:03:30.788725226 +0000 UTC m=+2059.726703850" Oct 01 06:03:36 crc kubenswrapper[4661]: I1001 06:03:36.128050 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:36 crc kubenswrapper[4661]: I1001 06:03:36.128552 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:36 crc kubenswrapper[4661]: I1001 06:03:36.205300 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:36 crc kubenswrapper[4661]: I1001 06:03:36.933183 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:37 crc kubenswrapper[4661]: I1001 06:03:37.001593 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:38 crc kubenswrapper[4661]: I1001 06:03:38.846580 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-88xwg" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="registry-server" containerID="cri-o://42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0" gracePeriod=2 Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.363055 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.418008 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hmnv\" (UniqueName: \"kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv\") pod \"b2815fdd-68a3-45a6-8712-9b2177641014\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.418317 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities\") pod \"b2815fdd-68a3-45a6-8712-9b2177641014\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.418442 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content\") pod \"b2815fdd-68a3-45a6-8712-9b2177641014\" (UID: \"b2815fdd-68a3-45a6-8712-9b2177641014\") " Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.419211 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities" (OuterVolumeSpecName: "utilities") pod "b2815fdd-68a3-45a6-8712-9b2177641014" (UID: "b2815fdd-68a3-45a6-8712-9b2177641014"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.424601 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv" (OuterVolumeSpecName: "kube-api-access-7hmnv") pod "b2815fdd-68a3-45a6-8712-9b2177641014" (UID: "b2815fdd-68a3-45a6-8712-9b2177641014"). InnerVolumeSpecName "kube-api-access-7hmnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.467208 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2815fdd-68a3-45a6-8712-9b2177641014" (UID: "b2815fdd-68a3-45a6-8712-9b2177641014"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.519757 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.519790 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hmnv\" (UniqueName: \"kubernetes.io/projected/b2815fdd-68a3-45a6-8712-9b2177641014-kube-api-access-7hmnv\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.519800 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2815fdd-68a3-45a6-8712-9b2177641014-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.863297 4661 generic.go:334] "Generic (PLEG): container finished" podID="b2815fdd-68a3-45a6-8712-9b2177641014" containerID="42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0" exitCode=0 Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.863356 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerDied","Data":"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0"} Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.863431 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-88xwg" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.863460 4661 scope.go:117] "RemoveContainer" containerID="42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.863441 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88xwg" event={"ID":"b2815fdd-68a3-45a6-8712-9b2177641014","Type":"ContainerDied","Data":"34012f5837078ebe6cc5f1b4dfb90c7fee4c2ec6e480829bb2ab7cef8c3bb40d"} Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.917049 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.924073 4661 scope.go:117] "RemoveContainer" containerID="a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e" Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.928248 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-88xwg"] Oct 01 06:03:39 crc kubenswrapper[4661]: I1001 06:03:39.963531 4661 scope.go:117] "RemoveContainer" containerID="4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.013171 4661 scope.go:117] "RemoveContainer" containerID="42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0" Oct 01 06:03:40 crc kubenswrapper[4661]: E1001 06:03:40.013611 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0\": container with ID starting with 42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0 not found: ID does not exist" containerID="42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.013669 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0"} err="failed to get container status \"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0\": rpc error: code = NotFound desc = could not find container \"42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0\": container with ID starting with 42236d4816cff055d35351e4123dafd1f6870e931c4ccdfb985cb0e6748d87a0 not found: ID does not exist" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.013708 4661 scope.go:117] "RemoveContainer" containerID="a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e" Oct 01 06:03:40 crc kubenswrapper[4661]: E1001 06:03:40.014221 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e\": container with ID starting with a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e not found: ID does not exist" containerID="a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.014261 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e"} err="failed to get container status \"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e\": rpc error: code = NotFound desc = could not find 
container \"a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e\": container with ID starting with a8a281230736d74d9329f296093f1dd166827109ee5d25f3ab07ee089e2b027e not found: ID does not exist" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.014277 4661 scope.go:117] "RemoveContainer" containerID="4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5" Oct 01 06:03:40 crc kubenswrapper[4661]: E1001 06:03:40.014800 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5\": container with ID starting with 4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5 not found: ID does not exist" containerID="4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5" Oct 01 06:03:40 crc kubenswrapper[4661]: I1001 06:03:40.014865 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5"} err="failed to get container status \"4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5\": rpc error: code = NotFound desc = could not find container \"4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5\": container with ID starting with 4b8bb83e935b2fc3ab201c4d3a20e4f85e13fdb2f7a53b87293db8cc80f1b0c5 not found: ID does not exist" Oct 01 06:03:41 crc kubenswrapper[4661]: I1001 06:03:41.777167 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" path="/var/lib/kubelet/pods/b2815fdd-68a3-45a6-8712-9b2177641014/volumes" Oct 01 06:04:02 crc kubenswrapper[4661]: I1001 06:04:02.161738 4661 generic.go:334] "Generic (PLEG): container finished" podID="3b1a4183-d18a-4f41-b62a-12f52370c46e" containerID="23785482e99f595457ab4f4622bded95855df235348159b3ebca10a2a9568ba1" exitCode=0 Oct 01 06:04:02 crc kubenswrapper[4661]: I1001 06:04:02.161963 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" event={"ID":"3b1a4183-d18a-4f41-b62a-12f52370c46e","Type":"ContainerDied","Data":"23785482e99f595457ab4f4622bded95855df235348159b3ebca10a2a9568ba1"} Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.723581 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.871470 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.872793 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.872871 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.872914 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flw5k\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.872970 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873021 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873050 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873085 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873165 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: 
\"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873199 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873273 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873322 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873358 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.873408 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle\") pod \"3b1a4183-d18a-4f41-b62a-12f52370c46e\" (UID: \"3b1a4183-d18a-4f41-b62a-12f52370c46e\") " Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.880826 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.881414 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.881413 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885363 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885487 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885506 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885584 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885622 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885709 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k" (OuterVolumeSpecName: "kube-api-access-flw5k") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "kube-api-access-flw5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.885899 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.886939 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.889877 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.907677 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.931329 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory" (OuterVolumeSpecName: "inventory") pod "3b1a4183-d18a-4f41-b62a-12f52370c46e" (UID: "3b1a4183-d18a-4f41-b62a-12f52370c46e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976112 4661 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976180 4661 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976206 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976227 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flw5k\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-kube-api-access-flw5k\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976246 4661 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976263 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976281 4661 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976299 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976316 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976336 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976355 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b1a4183-d18a-4f41-b62a-12f52370c46e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976377 4661 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976395 4661 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:03 crc kubenswrapper[4661]: I1001 06:04:03.976411 4661 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1a4183-d18a-4f41-b62a-12f52370c46e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.192066 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" event={"ID":"3b1a4183-d18a-4f41-b62a-12f52370c46e","Type":"ContainerDied","Data":"906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a"} Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.192331 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="906bc6c469dc56c5e47b85f8eef3d09218d2c56499e864063ccb53dfca7af19a" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.192117 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-nvtst" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.366656 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft"] Oct 01 06:04:04 crc kubenswrapper[4661]: E1001 06:04:04.366999 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="extract-content" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367017 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="extract-content" Oct 01 06:04:04 crc kubenswrapper[4661]: E1001 06:04:04.367024 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="registry-server" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367030 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="registry-server" Oct 01 06:04:04 crc kubenswrapper[4661]: E1001 06:04:04.367052 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1a4183-d18a-4f41-b62a-12f52370c46e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367060 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1a4183-d18a-4f41-b62a-12f52370c46e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 06:04:04 crc kubenswrapper[4661]: E1001 06:04:04.367074 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="extract-utilities" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367079 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="extract-utilities" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367256 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b1a4183-d18a-4f41-b62a-12f52370c46e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 
06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367275 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2815fdd-68a3-45a6-8712-9b2177641014" containerName="registry-server" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.367884 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.370646 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.370806 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.370964 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.371082 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.372399 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.381461 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft"] Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.486623 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.486850 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkr6c\" (UniqueName: \"kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.486915 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.486980 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.487121 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.588946 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.589118 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkr6c\" (UniqueName: \"kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.589167 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.589213 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.589257 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.592723 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.595247 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.595889 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc 
kubenswrapper[4661]: I1001 06:04:04.600891 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.616352 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkr6c\" (UniqueName: \"kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-v8wft\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:04 crc kubenswrapper[4661]: I1001 06:04:04.693757 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:04:05 crc kubenswrapper[4661]: W1001 06:04:05.081364 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5dc6881d_aedd_4945_98d8_9993fedd71dd.slice/crio-4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d WatchSource:0}: Error finding container 4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d: Status 404 returned error can't find the container with id 4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d Oct 01 06:04:05 crc kubenswrapper[4661]: I1001 06:04:05.100090 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft"] Oct 01 06:04:05 crc kubenswrapper[4661]: I1001 06:04:05.203994 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" event={"ID":"5dc6881d-aedd-4945-98d8-9993fedd71dd","Type":"ContainerStarted","Data":"4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d"} Oct 01 06:04:06 crc kubenswrapper[4661]: I1001 06:04:06.213274 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" event={"ID":"5dc6881d-aedd-4945-98d8-9993fedd71dd","Type":"ContainerStarted","Data":"ae1bfb9a11d21c19f12c40e964fa72ea1e4006f849bad59fcf2d246e0d832233"} Oct 01 06:04:06 crc kubenswrapper[4661]: I1001 06:04:06.252788 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" podStartSLOduration=1.7080678470000001 podStartE2EDuration="2.252762978s" podCreationTimestamp="2025-10-01 06:04:04 +0000 UTC" firstStartedPulling="2025-10-01 06:04:05.088205118 +0000 UTC m=+2094.026183752" lastFinishedPulling="2025-10-01 06:04:05.632900229 +0000 UTC m=+2094.570878883" observedRunningTime="2025-10-01 06:04:06.238604084 +0000 UTC m=+2095.176582788" watchObservedRunningTime="2025-10-01 06:04:06.252762978 +0000 UTC m=+2095.190741622" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.120394 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.125926 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.137448 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.270651 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.270725 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.271101 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx5cr\" (UniqueName: \"kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.373279 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.373463 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx5cr\" (UniqueName: \"kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.373612 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.373885 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.375468 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.395916 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mx5cr\" (UniqueName: \"kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr\") pod \"redhat-operators-2zs7d\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.468150 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:04:59 crc kubenswrapper[4661]: I1001 06:04:59.935154 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:04:59 crc kubenswrapper[4661]: W1001 06:04:59.952195 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4363d0c0_d620_4a1b_a784_b7fdfbd68958.slice/crio-3151601409bc206962eb828bfa1be1b83251f0e5c408606abf292fceb284f710 WatchSource:0}: Error finding container 3151601409bc206962eb828bfa1be1b83251f0e5c408606abf292fceb284f710: Status 404 returned error can't find the container with id 3151601409bc206962eb828bfa1be1b83251f0e5c408606abf292fceb284f710 Oct 01 06:05:00 crc kubenswrapper[4661]: I1001 06:05:00.862342 4661 generic.go:334] "Generic (PLEG): container finished" podID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerID="130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393" exitCode=0 Oct 01 06:05:00 crc kubenswrapper[4661]: I1001 06:05:00.862683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerDied","Data":"130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393"} Oct 01 06:05:00 crc kubenswrapper[4661]: I1001 06:05:00.862724 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerStarted","Data":"3151601409bc206962eb828bfa1be1b83251f0e5c408606abf292fceb284f710"} Oct 01 06:05:00 crc kubenswrapper[4661]: I1001 06:05:00.869751 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:05:02 crc kubenswrapper[4661]: I1001 06:05:02.885609 4661 generic.go:334] "Generic (PLEG): container finished" podID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerID="9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532" exitCode=0 Oct 01 06:05:02 crc kubenswrapper[4661]: I1001 06:05:02.885675 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerDied","Data":"9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532"} Oct 01 06:05:03 crc kubenswrapper[4661]: I1001 06:05:03.898443 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerStarted","Data":"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9"} Oct 01 06:05:03 crc kubenswrapper[4661]: I1001 06:05:03.919943 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2zs7d" podStartSLOduration=2.49334042 podStartE2EDuration="4.919922214s" podCreationTimestamp="2025-10-01 06:04:59 +0000 UTC" firstStartedPulling="2025-10-01 06:05:00.869437691 +0000 UTC m=+2149.807416305" lastFinishedPulling="2025-10-01 06:05:03.296019485 +0000 
UTC m=+2152.233998099" observedRunningTime="2025-10-01 06:05:03.919008289 +0000 UTC m=+2152.856986903" watchObservedRunningTime="2025-10-01 06:05:03.919922214 +0000 UTC m=+2152.857900838" Oct 01 06:05:04 crc kubenswrapper[4661]: I1001 06:05:04.309788 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:05:04 crc kubenswrapper[4661]: I1001 06:05:04.309847 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:05:09 crc kubenswrapper[4661]: I1001 06:05:09.469087 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:09 crc kubenswrapper[4661]: I1001 06:05:09.469624 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:09 crc kubenswrapper[4661]: I1001 06:05:09.539356 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:10 crc kubenswrapper[4661]: I1001 06:05:10.053188 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:10 crc kubenswrapper[4661]: I1001 06:05:10.123945 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:05:11 crc kubenswrapper[4661]: I1001 06:05:11.991228 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2zs7d" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="registry-server" containerID="cri-o://7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9" gracePeriod=2 Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.523308 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.561885 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content\") pod \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.562031 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities\") pod \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.562125 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx5cr\" (UniqueName: \"kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr\") pod \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\" (UID: \"4363d0c0-d620-4a1b-a784-b7fdfbd68958\") " Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.564027 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities" (OuterVolumeSpecName: "utilities") pod "4363d0c0-d620-4a1b-a784-b7fdfbd68958" (UID: "4363d0c0-d620-4a1b-a784-b7fdfbd68958"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.574890 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr" (OuterVolumeSpecName: "kube-api-access-mx5cr") pod "4363d0c0-d620-4a1b-a784-b7fdfbd68958" (UID: "4363d0c0-d620-4a1b-a784-b7fdfbd68958"). InnerVolumeSpecName "kube-api-access-mx5cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.660748 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4363d0c0-d620-4a1b-a784-b7fdfbd68958" (UID: "4363d0c0-d620-4a1b-a784-b7fdfbd68958"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.665067 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.665210 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4363d0c0-d620-4a1b-a784-b7fdfbd68958-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:12 crc kubenswrapper[4661]: I1001 06:05:12.665293 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx5cr\" (UniqueName: \"kubernetes.io/projected/4363d0c0-d620-4a1b-a784-b7fdfbd68958-kube-api-access-mx5cr\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.004750 4661 generic.go:334] "Generic (PLEG): container finished" podID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerID="7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9" exitCode=0 Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.004794 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerDied","Data":"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9"} Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.005143 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2zs7d" event={"ID":"4363d0c0-d620-4a1b-a784-b7fdfbd68958","Type":"ContainerDied","Data":"3151601409bc206962eb828bfa1be1b83251f0e5c408606abf292fceb284f710"} Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.004880 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2zs7d" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.006687 4661 scope.go:117] "RemoveContainer" containerID="7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.040375 4661 scope.go:117] "RemoveContainer" containerID="9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.051479 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.061551 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2zs7d"] Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.077340 4661 scope.go:117] "RemoveContainer" containerID="130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.119557 4661 scope.go:117] "RemoveContainer" containerID="7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9" Oct 01 06:05:13 crc kubenswrapper[4661]: E1001 06:05:13.120037 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9\": container with ID starting with 7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9 not found: ID does not exist" containerID="7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.120069 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9"} err="failed to get container status \"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9\": rpc error: code = NotFound desc = could not find container \"7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9\": container with ID starting with 7d06afb36e88a04e6d5bc488e37c4077840fa545df6ac338281127b8689c27e9 not found: ID does not exist" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.120088 4661 scope.go:117] "RemoveContainer" containerID="9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532" Oct 01 06:05:13 crc kubenswrapper[4661]: E1001 06:05:13.120407 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532\": container with ID starting with 9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532 not found: ID does not exist" containerID="9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.120429 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532"} err="failed to get container status \"9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532\": rpc error: code = NotFound desc = could not find container \"9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532\": container with ID starting with 9076459d905c9dc46863ef578f60592d119b56b99ae6d86039937f6cf5219532 not found: ID does not exist" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.120443 4661 scope.go:117] "RemoveContainer" 
containerID="130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393" Oct 01 06:05:13 crc kubenswrapper[4661]: E1001 06:05:13.120888 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393\": container with ID starting with 130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393 not found: ID does not exist" containerID="130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.120911 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393"} err="failed to get container status \"130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393\": rpc error: code = NotFound desc = could not find container \"130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393\": container with ID starting with 130ca0e2f286fe733b73133349b1609d2b770e1cc665a6de7d8cff1ab976d393 not found: ID does not exist" Oct 01 06:05:13 crc kubenswrapper[4661]: I1001 06:05:13.778267 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" path="/var/lib/kubelet/pods/4363d0c0-d620-4a1b-a784-b7fdfbd68958/volumes" Oct 01 06:05:23 crc kubenswrapper[4661]: I1001 06:05:23.127042 4661 generic.go:334] "Generic (PLEG): container finished" podID="5dc6881d-aedd-4945-98d8-9993fedd71dd" containerID="ae1bfb9a11d21c19f12c40e964fa72ea1e4006f849bad59fcf2d246e0d832233" exitCode=0 Oct 01 06:05:23 crc kubenswrapper[4661]: I1001 06:05:23.127203 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" event={"ID":"5dc6881d-aedd-4945-98d8-9993fedd71dd","Type":"ContainerDied","Data":"ae1bfb9a11d21c19f12c40e964fa72ea1e4006f849bad59fcf2d246e0d832233"} Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.601415 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.713597 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key\") pod \"5dc6881d-aedd-4945-98d8-9993fedd71dd\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.713767 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkr6c\" (UniqueName: \"kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c\") pod \"5dc6881d-aedd-4945-98d8-9993fedd71dd\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.714040 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle\") pod \"5dc6881d-aedd-4945-98d8-9993fedd71dd\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.714058 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory\") pod \"5dc6881d-aedd-4945-98d8-9993fedd71dd\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.714100 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0\") pod \"5dc6881d-aedd-4945-98d8-9993fedd71dd\" (UID: \"5dc6881d-aedd-4945-98d8-9993fedd71dd\") " Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.719606 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c" (OuterVolumeSpecName: "kube-api-access-tkr6c") pod "5dc6881d-aedd-4945-98d8-9993fedd71dd" (UID: "5dc6881d-aedd-4945-98d8-9993fedd71dd"). InnerVolumeSpecName "kube-api-access-tkr6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.721335 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5dc6881d-aedd-4945-98d8-9993fedd71dd" (UID: "5dc6881d-aedd-4945-98d8-9993fedd71dd"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.742374 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory" (OuterVolumeSpecName: "inventory") pod "5dc6881d-aedd-4945-98d8-9993fedd71dd" (UID: "5dc6881d-aedd-4945-98d8-9993fedd71dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.744207 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5dc6881d-aedd-4945-98d8-9993fedd71dd" (UID: "5dc6881d-aedd-4945-98d8-9993fedd71dd"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.758751 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "5dc6881d-aedd-4945-98d8-9993fedd71dd" (UID: "5dc6881d-aedd-4945-98d8-9993fedd71dd"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.816280 4661 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.816328 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.816340 4661 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5dc6881d-aedd-4945-98d8-9993fedd71dd-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.816352 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dc6881d-aedd-4945-98d8-9993fedd71dd-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:24 crc kubenswrapper[4661]: I1001 06:05:24.816362 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkr6c\" (UniqueName: \"kubernetes.io/projected/5dc6881d-aedd-4945-98d8-9993fedd71dd-kube-api-access-tkr6c\") on node \"crc\" DevicePath \"\"" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.154768 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" event={"ID":"5dc6881d-aedd-4945-98d8-9993fedd71dd","Type":"ContainerDied","Data":"4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d"} Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.154816 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4120fe77254b137dfec3fc31d9c320f76f8afdcaad3227580d5662a948e8832d" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.154862 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-v8wft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.279233 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft"] Oct 01 06:05:25 crc kubenswrapper[4661]: E1001 06:05:25.279663 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="extract-content" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.279688 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="extract-content" Oct 01 06:05:25 crc kubenswrapper[4661]: E1001 06:05:25.279728 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dc6881d-aedd-4945-98d8-9993fedd71dd" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.279737 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dc6881d-aedd-4945-98d8-9993fedd71dd" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 06:05:25 crc kubenswrapper[4661]: E1001 06:05:25.279753 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="registry-server" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.279762 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="registry-server" Oct 01 06:05:25 crc kubenswrapper[4661]: E1001 06:05:25.279792 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="extract-utilities" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.279801 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="extract-utilities" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.280015 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dc6881d-aedd-4945-98d8-9993fedd71dd" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.280056 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="4363d0c0-d620-4a1b-a784-b7fdfbd68958" containerName="registry-server" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.280901 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.283031 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.283858 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.284269 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.284560 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.284908 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.284908 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.299852 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft"] Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.427546 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf47r\" (UniqueName: \"kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.427837 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.427937 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.428177 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.428238 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.428302 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530362 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530575 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530681 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530722 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530853 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf47r\" (UniqueName: \"kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.530947 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.537675 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.540193 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.542066 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.553133 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.565237 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf47r\" (UniqueName: \"kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.570691 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:25 crc kubenswrapper[4661]: I1001 06:05:25.600948 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:05:26 crc kubenswrapper[4661]: I1001 06:05:26.176551 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft"] Oct 01 06:05:27 crc kubenswrapper[4661]: I1001 06:05:27.176236 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" event={"ID":"632190ce-99ee-453c-8cdb-103d2f512c6b","Type":"ContainerStarted","Data":"bb16d8bf0dad65263be872fc3e5a597628014312fe8c73bff48ca2396bcfb885"} Oct 01 06:05:28 crc kubenswrapper[4661]: I1001 06:05:28.194937 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" event={"ID":"632190ce-99ee-453c-8cdb-103d2f512c6b","Type":"ContainerStarted","Data":"50ef0eaa49869ae174a29891b617dae6788d762332a878a8fec61ab6c83f46b2"} Oct 01 06:05:28 crc kubenswrapper[4661]: I1001 06:05:28.225406 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" podStartSLOduration=2.466753166 podStartE2EDuration="3.225379088s" podCreationTimestamp="2025-10-01 06:05:25 +0000 UTC" firstStartedPulling="2025-10-01 06:05:26.190143687 +0000 UTC m=+2175.128122301" lastFinishedPulling="2025-10-01 06:05:26.948769589 +0000 UTC m=+2175.886748223" observedRunningTime="2025-10-01 06:05:28.22322709 +0000 UTC m=+2177.161205704" watchObservedRunningTime="2025-10-01 06:05:28.225379088 +0000 UTC m=+2177.163357732" Oct 01 06:05:34 crc kubenswrapper[4661]: I1001 06:05:34.309512 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:05:34 crc kubenswrapper[4661]: I1001 06:05:34.310158 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.718826 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.724957 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.733374 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.878366 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.878579 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hghpg\" (UniqueName: \"kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.878673 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.981760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.981852 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hghpg\" (UniqueName: \"kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.981883 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.982388 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:54 crc kubenswrapper[4661]: I1001 06:05:54.982442 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:55 crc kubenswrapper[4661]: I1001 06:05:55.007492 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hghpg\" (UniqueName: \"kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg\") pod \"certified-operators-qmgdk\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:55 crc kubenswrapper[4661]: I1001 06:05:55.059285 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:05:55 crc kubenswrapper[4661]: I1001 06:05:55.578466 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:05:56 crc kubenswrapper[4661]: I1001 06:05:56.534404 4661 generic.go:334] "Generic (PLEG): container finished" podID="6423e07b-42dc-4fb5-a746-47260babbda2" containerID="6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499" exitCode=0 Oct 01 06:05:56 crc kubenswrapper[4661]: I1001 06:05:56.534502 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerDied","Data":"6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499"} Oct 01 06:05:56 crc kubenswrapper[4661]: I1001 06:05:56.535837 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerStarted","Data":"6703ef31377343dde2ed7d84d3b10e66936cb56a6a202e2a2dcfff9b0650f69e"} Oct 01 06:05:57 crc kubenswrapper[4661]: I1001 06:05:57.546274 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerStarted","Data":"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a"} Oct 01 06:05:58 crc kubenswrapper[4661]: I1001 06:05:58.557121 4661 generic.go:334] "Generic (PLEG): container finished" podID="6423e07b-42dc-4fb5-a746-47260babbda2" containerID="e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a" exitCode=0 Oct 01 06:05:58 crc kubenswrapper[4661]: I1001 06:05:58.557171 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerDied","Data":"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a"} Oct 01 06:05:59 crc kubenswrapper[4661]: I1001 06:05:59.576190 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerStarted","Data":"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1"} Oct 01 06:05:59 crc kubenswrapper[4661]: I1001 06:05:59.600752 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qmgdk" podStartSLOduration=3.029366203 podStartE2EDuration="5.600731233s" podCreationTimestamp="2025-10-01 06:05:54 +0000 UTC" firstStartedPulling="2025-10-01 06:05:56.537259338 +0000 UTC m=+2205.475237952" lastFinishedPulling="2025-10-01 06:05:59.108624328 +0000 UTC m=+2208.046602982" observedRunningTime="2025-10-01 06:05:59.59210168 +0000 UTC m=+2208.530080304" watchObservedRunningTime="2025-10-01 06:05:59.600731233 +0000 UTC m=+2208.538709857" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.309266 4661 patch_prober.go:28] interesting 
pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.309894 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.309955 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.310900 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.310975 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" gracePeriod=600 Oct 01 06:06:04 crc kubenswrapper[4661]: E1001 06:06:04.435673 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.638959 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" exitCode=0 Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.639005 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a"} Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.639386 4661 scope.go:117] "RemoveContainer" containerID="9945639cebecb82e62ea77554393d94ec336d0d27f9ce3233bff36e74217a74a" Oct 01 06:06:04 crc kubenswrapper[4661]: I1001 06:06:04.641897 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:06:04 crc kubenswrapper[4661]: E1001 06:06:04.642865 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:06:05 crc kubenswrapper[4661]: I1001 06:06:05.060270 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:05 crc kubenswrapper[4661]: I1001 06:06:05.060360 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:05 crc kubenswrapper[4661]: I1001 06:06:05.122370 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:05 crc kubenswrapper[4661]: I1001 06:06:05.749991 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:05 crc kubenswrapper[4661]: I1001 06:06:05.827213 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:06:07 crc kubenswrapper[4661]: I1001 06:06:07.685690 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qmgdk" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="registry-server" containerID="cri-o://f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1" gracePeriod=2 Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.189775 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.278313 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content\") pod \"6423e07b-42dc-4fb5-a746-47260babbda2\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.278445 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities\") pod \"6423e07b-42dc-4fb5-a746-47260babbda2\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.278926 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hghpg\" (UniqueName: \"kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg\") pod \"6423e07b-42dc-4fb5-a746-47260babbda2\" (UID: \"6423e07b-42dc-4fb5-a746-47260babbda2\") " Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.282283 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities" (OuterVolumeSpecName: "utilities") pod "6423e07b-42dc-4fb5-a746-47260babbda2" (UID: "6423e07b-42dc-4fb5-a746-47260babbda2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.287688 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg" (OuterVolumeSpecName: "kube-api-access-hghpg") pod "6423e07b-42dc-4fb5-a746-47260babbda2" (UID: "6423e07b-42dc-4fb5-a746-47260babbda2"). InnerVolumeSpecName "kube-api-access-hghpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.344415 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6423e07b-42dc-4fb5-a746-47260babbda2" (UID: "6423e07b-42dc-4fb5-a746-47260babbda2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.381836 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.381871 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6423e07b-42dc-4fb5-a746-47260babbda2-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.381881 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hghpg\" (UniqueName: \"kubernetes.io/projected/6423e07b-42dc-4fb5-a746-47260babbda2-kube-api-access-hghpg\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.701476 4661 generic.go:334] "Generic (PLEG): container finished" podID="6423e07b-42dc-4fb5-a746-47260babbda2" containerID="f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1" exitCode=0 Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.701578 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerDied","Data":"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1"} Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.701595 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qmgdk" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.701926 4661 scope.go:117] "RemoveContainer" containerID="f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.701908 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qmgdk" event={"ID":"6423e07b-42dc-4fb5-a746-47260babbda2","Type":"ContainerDied","Data":"6703ef31377343dde2ed7d84d3b10e66936cb56a6a202e2a2dcfff9b0650f69e"} Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.754282 4661 scope.go:117] "RemoveContainer" containerID="e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.764201 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.780449 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qmgdk"] Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.791696 4661 scope.go:117] "RemoveContainer" containerID="6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.827339 4661 scope.go:117] "RemoveContainer" containerID="f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1" Oct 01 06:06:08 crc kubenswrapper[4661]: E1001 06:06:08.828106 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1\": container with ID starting with f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1 not found: ID does not exist" containerID="f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.828171 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1"} err="failed to get container status \"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1\": rpc error: code = NotFound desc = could not find container \"f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1\": container with ID starting with f440d0b28d91b62cda0e258bc859f383647357de9aef292b3ae7cd4c275df0c1 not found: ID does not exist" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.828216 4661 scope.go:117] "RemoveContainer" containerID="e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a" Oct 01 06:06:08 crc kubenswrapper[4661]: E1001 06:06:08.828774 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a\": container with ID starting with e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a not found: ID does not exist" containerID="e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.828815 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a"} err="failed to get container status \"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a\": rpc error: code = NotFound desc = could not find 
container \"e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a\": container with ID starting with e9808cfe73ac78359333abe1e7c84146e09588e87e10abb680213e2a88a3825a not found: ID does not exist" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.828836 4661 scope.go:117] "RemoveContainer" containerID="6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499" Oct 01 06:06:08 crc kubenswrapper[4661]: E1001 06:06:08.829224 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499\": container with ID starting with 6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499 not found: ID does not exist" containerID="6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499" Oct 01 06:06:08 crc kubenswrapper[4661]: I1001 06:06:08.829246 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499"} err="failed to get container status \"6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499\": rpc error: code = NotFound desc = could not find container \"6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499\": container with ID starting with 6cd40a130d11589fc637543675b9dbd6cfde252947e2bf6a2fcca14f5c374499 not found: ID does not exist" Oct 01 06:06:08 crc kubenswrapper[4661]: E1001 06:06:08.834841 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6423e07b_42dc_4fb5_a746_47260babbda2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6423e07b_42dc_4fb5_a746_47260babbda2.slice/crio-6703ef31377343dde2ed7d84d3b10e66936cb56a6a202e2a2dcfff9b0650f69e\": RecentStats: unable to find data in memory cache]" Oct 01 06:06:09 crc kubenswrapper[4661]: I1001 06:06:09.780357 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" path="/var/lib/kubelet/pods/6423e07b-42dc-4fb5-a746-47260babbda2/volumes" Oct 01 06:06:17 crc kubenswrapper[4661]: I1001 06:06:17.757989 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:06:17 crc kubenswrapper[4661]: E1001 06:06:17.758848 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:06:24 crc kubenswrapper[4661]: I1001 06:06:24.941065 4661 generic.go:334] "Generic (PLEG): container finished" podID="632190ce-99ee-453c-8cdb-103d2f512c6b" containerID="50ef0eaa49869ae174a29891b617dae6788d762332a878a8fec61ab6c83f46b2" exitCode=0 Oct 01 06:06:24 crc kubenswrapper[4661]: I1001 06:06:24.941759 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" event={"ID":"632190ce-99ee-453c-8cdb-103d2f512c6b","Type":"ContainerDied","Data":"50ef0eaa49869ae174a29891b617dae6788d762332a878a8fec61ab6c83f46b2"} Oct 01 06:06:26 crc 
kubenswrapper[4661]: I1001 06:06:26.490911 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.596937 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf47r\" (UniqueName: \"kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.597077 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.597115 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.597156 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.597206 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.597263 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"632190ce-99ee-453c-8cdb-103d2f512c6b\" (UID: \"632190ce-99ee-453c-8cdb-103d2f512c6b\") " Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.602511 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r" (OuterVolumeSpecName: "kube-api-access-zf47r") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "kube-api-access-zf47r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.605109 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.633857 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.643937 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.647937 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory" (OuterVolumeSpecName: "inventory") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.651264 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "632190ce-99ee-453c-8cdb-103d2f512c6b" (UID: "632190ce-99ee-453c-8cdb-103d2f512c6b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.700966 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf47r\" (UniqueName: \"kubernetes.io/projected/632190ce-99ee-453c-8cdb-103d2f512c6b-kube-api-access-zf47r\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.701007 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.701017 4661 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.701029 4661 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.701038 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.701047 4661 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/632190ce-99ee-453c-8cdb-103d2f512c6b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.971136 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" event={"ID":"632190ce-99ee-453c-8cdb-103d2f512c6b","Type":"ContainerDied","Data":"bb16d8bf0dad65263be872fc3e5a597628014312fe8c73bff48ca2396bcfb885"} Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.971402 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb16d8bf0dad65263be872fc3e5a597628014312fe8c73bff48ca2396bcfb885" Oct 01 06:06:26 crc kubenswrapper[4661]: I1001 06:06:26.971351 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.175381 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq"] Oct 01 06:06:27 crc kubenswrapper[4661]: E1001 06:06:27.175927 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="extract-utilities" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.175957 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="extract-utilities" Oct 01 06:06:27 crc kubenswrapper[4661]: E1001 06:06:27.175995 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="registry-server" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.176005 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="registry-server" Oct 01 06:06:27 crc kubenswrapper[4661]: E1001 06:06:27.176024 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632190ce-99ee-453c-8cdb-103d2f512c6b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.176038 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="632190ce-99ee-453c-8cdb-103d2f512c6b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 06:06:27 crc kubenswrapper[4661]: E1001 06:06:27.176086 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="extract-content" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.176098 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="extract-content" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.176379 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6423e07b-42dc-4fb5-a746-47260babbda2" containerName="registry-server" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.176427 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="632190ce-99ee-453c-8cdb-103d2f512c6b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.177549 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.183648 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.183704 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.183895 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.183912 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.184098 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.187759 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq"] Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.212136 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.212249 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6djh\" (UniqueName: \"kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.212384 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.212449 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.212511 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.314101 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-q6djh\" (UniqueName: \"kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.314523 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.314806 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.315039 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.315297 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.319397 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.320542 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.322345 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.327773 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.338168 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6djh\" (UniqueName: \"kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:27 crc kubenswrapper[4661]: I1001 06:06:27.497358 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" Oct 01 06:06:28 crc kubenswrapper[4661]: I1001 06:06:28.103344 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq"] Oct 01 06:06:28 crc kubenswrapper[4661]: I1001 06:06:28.757799 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:06:28 crc kubenswrapper[4661]: E1001 06:06:28.758656 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:06:29 crc kubenswrapper[4661]: I1001 06:06:29.001153 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" event={"ID":"322bbaf3-0120-49be-90f1-04d42199e753","Type":"ContainerStarted","Data":"33e8f64dccf56ca2b220323dcdf3f586e2355801f64cfc87b1b04a07619589e8"} Oct 01 06:06:30 crc kubenswrapper[4661]: I1001 06:06:30.012229 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" event={"ID":"322bbaf3-0120-49be-90f1-04d42199e753","Type":"ContainerStarted","Data":"1664c206624cbdc4ae539e9fa0cd5c19ebe426236b117f11284b918005a1841b"} Oct 01 06:06:43 crc kubenswrapper[4661]: I1001 06:06:43.759188 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:06:43 crc kubenswrapper[4661]: E1001 06:06:43.760248 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:06:57 crc kubenswrapper[4661]: I1001 06:06:57.757363 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:06:57 crc kubenswrapper[4661]: E1001 06:06:57.758271 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.779695 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" podStartSLOduration=34.138214857 podStartE2EDuration="34.779669272s" podCreationTimestamp="2025-10-01 06:06:27 +0000 UTC" firstStartedPulling="2025-10-01 06:06:28.126840852 +0000 UTC m=+2237.064819506" lastFinishedPulling="2025-10-01 06:06:28.768295267 +0000 UTC m=+2237.706273921" observedRunningTime="2025-10-01 06:06:30.037063813 +0000 UTC m=+2238.975042427" watchObservedRunningTime="2025-10-01 06:07:01.779669272 +0000 UTC m=+2270.717647926"
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.788678 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"]
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.792378 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.810838 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"]
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.941959 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.942071 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:01 crc kubenswrapper[4661]: I1001 06:07:01.942772 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxphx\" (UniqueName: \"kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.044577 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxphx\" (UniqueName: \"kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.044754 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.044798 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.045302 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.045384 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.067241 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxphx\" (UniqueName: \"kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx\") pod \"redhat-marketplace-2wbw2\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.133340 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2wbw2"
Oct 01 06:07:02 crc kubenswrapper[4661]: I1001 06:07:02.607920 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"]
Oct 01 06:07:03 crc kubenswrapper[4661]: I1001 06:07:03.419675 4661 generic.go:334] "Generic (PLEG): container finished" podID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerID="4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e" exitCode=0
Oct 01 06:07:03 crc kubenswrapper[4661]: I1001 06:07:03.419834 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerDied","Data":"4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e"}
Oct 01 06:07:03 crc kubenswrapper[4661]: I1001 06:07:03.419927 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerStarted","Data":"855ef84f30e3162acb92e7cce9db2a6881d96742848ae26cf0becd3bd7a6089d"}
Oct 01 06:07:04 crc kubenswrapper[4661]: I1001 06:07:04.435572 4661 generic.go:334] "Generic (PLEG): container finished" podID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerID="2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7" exitCode=0
Oct 01 06:07:04 crc kubenswrapper[4661]: I1001 06:07:04.435628 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerDied","Data":"2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7"}
Oct 01 06:07:05 crc kubenswrapper[4661]: I1001 06:07:05.455940 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerStarted","Data":"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd"}
event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerStarted","Data":"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd"} Oct 01 06:07:05 crc kubenswrapper[4661]: I1001 06:07:05.487360 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2wbw2" podStartSLOduration=3.016477483 podStartE2EDuration="4.487332616s" podCreationTimestamp="2025-10-01 06:07:01 +0000 UTC" firstStartedPulling="2025-10-01 06:07:03.423933112 +0000 UTC m=+2272.361911766" lastFinishedPulling="2025-10-01 06:07:04.894788245 +0000 UTC m=+2273.832766899" observedRunningTime="2025-10-01 06:07:05.478901786 +0000 UTC m=+2274.416880410" watchObservedRunningTime="2025-10-01 06:07:05.487332616 +0000 UTC m=+2274.425311270" Oct 01 06:07:09 crc kubenswrapper[4661]: I1001 06:07:09.757892 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:07:09 crc kubenswrapper[4661]: E1001 06:07:09.758932 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:07:12 crc kubenswrapper[4661]: I1001 06:07:12.133591 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:12 crc kubenswrapper[4661]: I1001 06:07:12.134030 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:12 crc kubenswrapper[4661]: I1001 06:07:12.217794 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:12 crc kubenswrapper[4661]: I1001 06:07:12.643461 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:12 crc kubenswrapper[4661]: I1001 06:07:12.695722 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"] Oct 01 06:07:14 crc kubenswrapper[4661]: I1001 06:07:14.566954 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2wbw2" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="registry-server" containerID="cri-o://297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd" gracePeriod=2 Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.132071 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.258362 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxphx\" (UniqueName: \"kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx\") pod \"37d0853a-88e6-4afe-8c93-2d4d33f19589\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.258440 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content\") pod \"37d0853a-88e6-4afe-8c93-2d4d33f19589\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.258488 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities\") pod \"37d0853a-88e6-4afe-8c93-2d4d33f19589\" (UID: \"37d0853a-88e6-4afe-8c93-2d4d33f19589\") " Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.259511 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities" (OuterVolumeSpecName: "utilities") pod "37d0853a-88e6-4afe-8c93-2d4d33f19589" (UID: "37d0853a-88e6-4afe-8c93-2d4d33f19589"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.268333 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx" (OuterVolumeSpecName: "kube-api-access-zxphx") pod "37d0853a-88e6-4afe-8c93-2d4d33f19589" (UID: "37d0853a-88e6-4afe-8c93-2d4d33f19589"). InnerVolumeSpecName "kube-api-access-zxphx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.274798 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37d0853a-88e6-4afe-8c93-2d4d33f19589" (UID: "37d0853a-88e6-4afe-8c93-2d4d33f19589"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.360491 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxphx\" (UniqueName: \"kubernetes.io/projected/37d0853a-88e6-4afe-8c93-2d4d33f19589-kube-api-access-zxphx\") on node \"crc\" DevicePath \"\"" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.360530 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.360542 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d0853a-88e6-4afe-8c93-2d4d33f19589-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.585089 4661 generic.go:334] "Generic (PLEG): container finished" podID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerID="297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd" exitCode=0 Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.585148 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerDied","Data":"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd"} Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.585187 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2wbw2" event={"ID":"37d0853a-88e6-4afe-8c93-2d4d33f19589","Type":"ContainerDied","Data":"855ef84f30e3162acb92e7cce9db2a6881d96742848ae26cf0becd3bd7a6089d"} Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.585214 4661 scope.go:117] "RemoveContainer" containerID="297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.585238 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2wbw2" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.619501 4661 scope.go:117] "RemoveContainer" containerID="2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.654274 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"] Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.667254 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2wbw2"] Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.672341 4661 scope.go:117] "RemoveContainer" containerID="4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.719786 4661 scope.go:117] "RemoveContainer" containerID="297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd" Oct 01 06:07:15 crc kubenswrapper[4661]: E1001 06:07:15.720230 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd\": container with ID starting with 297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd not found: ID does not exist" containerID="297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.720330 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd"} err="failed to get container status \"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd\": rpc error: code = NotFound desc = could not find container \"297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd\": container with ID starting with 297c3fb0f622fb929eced4723fcee705d7365ebd03c20f6007f0fe49e9a135cd not found: ID does not exist" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.720377 4661 scope.go:117] "RemoveContainer" containerID="2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7" Oct 01 06:07:15 crc kubenswrapper[4661]: E1001 06:07:15.720753 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7\": container with ID starting with 2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7 not found: ID does not exist" containerID="2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.720990 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7"} err="failed to get container status \"2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7\": rpc error: code = NotFound desc = could not find container \"2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7\": container with ID starting with 2e6473a53c47c447d059b0dcf6f8fe7c87ca1ef5c4febbbc2af0fa82bed9b0c7 not found: ID does not exist" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.721028 4661 scope.go:117] "RemoveContainer" containerID="4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e" Oct 01 06:07:15 crc kubenswrapper[4661]: E1001 06:07:15.721673 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e\": container with ID starting with 4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e not found: ID does not exist" containerID="4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.721726 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e"} err="failed to get container status \"4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e\": rpc error: code = NotFound desc = could not find container \"4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e\": container with ID starting with 4448c8d4cba4a59f5b86f29e65e3da9cdb5b8464f2f9abcefd8012a4c28b461e not found: ID does not exist" Oct 01 06:07:15 crc kubenswrapper[4661]: I1001 06:07:15.775609 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" path="/var/lib/kubelet/pods/37d0853a-88e6-4afe-8c93-2d4d33f19589/volumes" Oct 01 06:07:20 crc kubenswrapper[4661]: I1001 06:07:20.756984 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:07:20 crc kubenswrapper[4661]: E1001 06:07:20.759280 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:07:32 crc kubenswrapper[4661]: I1001 06:07:32.758206 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:07:32 crc kubenswrapper[4661]: E1001 06:07:32.759365 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:07:44 crc kubenswrapper[4661]: I1001 06:07:44.757280 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:07:44 crc kubenswrapper[4661]: E1001 06:07:44.758525 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:07:55 crc kubenswrapper[4661]: I1001 06:07:55.757862 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:07:55 crc kubenswrapper[4661]: E1001 06:07:55.758673 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:08:07 crc kubenswrapper[4661]: I1001 06:08:07.757467 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:08:07 crc kubenswrapper[4661]: E1001 06:08:07.761112 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:08:19 crc kubenswrapper[4661]: I1001 06:08:19.757395 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:08:19 crc kubenswrapper[4661]: E1001 06:08:19.758734 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:08:31 crc kubenswrapper[4661]: I1001 06:08:31.769585 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:08:31 crc kubenswrapper[4661]: E1001 06:08:31.770978 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:08:43 crc kubenswrapper[4661]: I1001 06:08:43.757258 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:08:43 crc kubenswrapper[4661]: E1001 06:08:43.758375 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:08:54 crc kubenswrapper[4661]: I1001 06:08:54.756661 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:08:54 crc kubenswrapper[4661]: E1001 06:08:54.757616 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:09:09 crc kubenswrapper[4661]: I1001 06:09:09.757666 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:09:09 crc kubenswrapper[4661]: E1001 06:09:09.760415 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:09:20 crc kubenswrapper[4661]: I1001 06:09:20.758106 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:09:20 crc kubenswrapper[4661]: E1001 06:09:20.759248 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:09:32 crc kubenswrapper[4661]: I1001 06:09:32.757566 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:09:32 crc kubenswrapper[4661]: E1001 06:09:32.759826 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:09:47 crc kubenswrapper[4661]: I1001 06:09:47.757119 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:09:47 crc kubenswrapper[4661]: E1001 06:09:47.758133 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:10:00 crc kubenswrapper[4661]: I1001 06:10:00.757417 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:10:00 crc kubenswrapper[4661]: E1001 06:10:00.759127 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" 
podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:10:15 crc kubenswrapper[4661]: I1001 06:10:15.756876 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:10:15 crc kubenswrapper[4661]: E1001 06:10:15.757692 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:10:26 crc kubenswrapper[4661]: I1001 06:10:26.757452 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:10:26 crc kubenswrapper[4661]: E1001 06:10:26.758461 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:10:41 crc kubenswrapper[4661]: I1001 06:10:41.768957 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:10:41 crc kubenswrapper[4661]: E1001 06:10:41.770048 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:10:55 crc kubenswrapper[4661]: I1001 06:10:55.757175 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:10:55 crc kubenswrapper[4661]: E1001 06:10:55.758018 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:11:10 crc kubenswrapper[4661]: I1001 06:11:10.775214 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:11:11 crc kubenswrapper[4661]: I1001 06:11:11.551161 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4"} Oct 01 06:11:26 crc kubenswrapper[4661]: I1001 06:11:26.736051 4661 generic.go:334] "Generic (PLEG): container finished" podID="322bbaf3-0120-49be-90f1-04d42199e753" containerID="1664c206624cbdc4ae539e9fa0cd5c19ebe426236b117f11284b918005a1841b" exitCode=0 Oct 01 06:11:26 crc kubenswrapper[4661]: I1001 
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.317868 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.336515 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6djh\" (UniqueName: \"kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh\") pod \"322bbaf3-0120-49be-90f1-04d42199e753\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") "
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.336589 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0\") pod \"322bbaf3-0120-49be-90f1-04d42199e753\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") "
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.336821 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle\") pod \"322bbaf3-0120-49be-90f1-04d42199e753\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") "
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.336870 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key\") pod \"322bbaf3-0120-49be-90f1-04d42199e753\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") "
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.336920 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory\") pod \"322bbaf3-0120-49be-90f1-04d42199e753\" (UID: \"322bbaf3-0120-49be-90f1-04d42199e753\") "
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.347837 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "322bbaf3-0120-49be-90f1-04d42199e753" (UID: "322bbaf3-0120-49be-90f1-04d42199e753"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.347876 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh" (OuterVolumeSpecName: "kube-api-access-q6djh") pod "322bbaf3-0120-49be-90f1-04d42199e753" (UID: "322bbaf3-0120-49be-90f1-04d42199e753"). InnerVolumeSpecName "kube-api-access-q6djh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.369115 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "322bbaf3-0120-49be-90f1-04d42199e753" (UID: "322bbaf3-0120-49be-90f1-04d42199e753"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.374466 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "322bbaf3-0120-49be-90f1-04d42199e753" (UID: "322bbaf3-0120-49be-90f1-04d42199e753"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.421693 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory" (OuterVolumeSpecName: "inventory") pod "322bbaf3-0120-49be-90f1-04d42199e753" (UID: "322bbaf3-0120-49be-90f1-04d42199e753"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.439049 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.439086 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-inventory\") on node \"crc\" DevicePath \"\""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.439097 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6djh\" (UniqueName: \"kubernetes.io/projected/322bbaf3-0120-49be-90f1-04d42199e753-kube-api-access-q6djh\") on node \"crc\" DevicePath \"\""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.439111 4661 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.439121 4661 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322bbaf3-0120-49be-90f1-04d42199e753-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.765913 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq" event={"ID":"322bbaf3-0120-49be-90f1-04d42199e753","Type":"ContainerDied","Data":"33e8f64dccf56ca2b220323dcdf3f586e2355801f64cfc87b1b04a07619589e8"}
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.765969 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33e8f64dccf56ca2b220323dcdf3f586e2355801f64cfc87b1b04a07619589e8"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.766058 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.875742 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm"]
Oct 01 06:11:28 crc kubenswrapper[4661]: E1001 06:11:28.876234 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="extract-content"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876255 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="extract-content"
Oct 01 06:11:28 crc kubenswrapper[4661]: E1001 06:11:28.876288 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322bbaf3-0120-49be-90f1-04d42199e753" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876297 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="322bbaf3-0120-49be-90f1-04d42199e753" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 01 06:11:28 crc kubenswrapper[4661]: E1001 06:11:28.876314 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="registry-server"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876323 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="registry-server"
Oct 01 06:11:28 crc kubenswrapper[4661]: E1001 06:11:28.876340 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="extract-utilities"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876348 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="extract-utilities"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876596 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="37d0853a-88e6-4afe-8c93-2d4d33f19589" containerName="registry-server"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.876624 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="322bbaf3-0120-49be-90f1-04d42199e753" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.877504 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm"
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.883290 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.883609 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.883820 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.888167 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.888208 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.888234 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.888393 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:11:28 crc kubenswrapper[4661]: I1001 06:11:28.901192 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm"] Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053008 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053379 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053477 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053525 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053557 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053664 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfmg8\" (UniqueName: \"kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053721 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.053935 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.054050 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.156711 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.156861 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.156921 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157017 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157068 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157097 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157215 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfmg8\" (UniqueName: \"kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157271 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.157356 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.159525 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.163149 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.163479 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.163496 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.164016 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.164297 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.164610 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.164628 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.175176 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfmg8\" (UniqueName: \"kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8\") pod \"nova-edpm-deployment-openstack-edpm-ipam-2xtbm\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.208160 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.782837 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:11:29 crc kubenswrapper[4661]: I1001 06:11:29.786396 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm"] Oct 01 06:11:30 crc kubenswrapper[4661]: I1001 06:11:30.789439 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" event={"ID":"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f","Type":"ContainerStarted","Data":"3b721abaa6c9ad47ea555e564de00a3714e611623f2c217ab1dfd07d018da8f3"} Oct 01 06:11:30 crc kubenswrapper[4661]: I1001 06:11:30.789941 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" event={"ID":"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f","Type":"ContainerStarted","Data":"18584037f8bdbdf89dd7ffa514c31e7231b894739473ca65526714cc9b659985"} Oct 01 06:11:30 crc kubenswrapper[4661]: I1001 06:11:30.812955 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" podStartSLOduration=2.3484913499999998 podStartE2EDuration="2.812940317s" podCreationTimestamp="2025-10-01 06:11:28 +0000 UTC" firstStartedPulling="2025-10-01 06:11:29.782552166 +0000 UTC m=+2538.720530780" lastFinishedPulling="2025-10-01 06:11:30.247001083 +0000 UTC m=+2539.184979747" observedRunningTime="2025-10-01 06:11:30.808691052 +0000 UTC m=+2539.746669666" watchObservedRunningTime="2025-10-01 06:11:30.812940317 +0000 UTC m=+2539.750918931" Oct 01 06:13:34 crc kubenswrapper[4661]: I1001 06:13:34.309737 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:13:34 crc kubenswrapper[4661]: I1001 06:13:34.310503 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:14:04 crc kubenswrapper[4661]: I1001 06:14:04.309315 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:14:04 crc kubenswrapper[4661]: I1001 06:14:04.309839 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:14:34 crc kubenswrapper[4661]: I1001 06:14:34.309780 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Oct 01 06:14:34 crc kubenswrapper[4661]: I1001 06:14:34.310529 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:14:34 crc kubenswrapper[4661]: I1001 06:14:34.310604 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:14:34 crc kubenswrapper[4661]: I1001 06:14:34.311826 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:14:34 crc kubenswrapper[4661]: I1001 06:14:34.311934 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4" gracePeriod=600 Oct 01 06:14:35 crc kubenswrapper[4661]: I1001 06:14:35.062966 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4" exitCode=0 Oct 01 06:14:35 crc kubenswrapper[4661]: I1001 06:14:35.063545 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4"} Oct 01 06:14:35 crc kubenswrapper[4661]: I1001 06:14:35.063574 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"} Oct 01 06:14:35 crc kubenswrapper[4661]: I1001 06:14:35.063590 4661 scope.go:117] "RemoveContainer" containerID="eca9870453f83973109ae070e296da55c08e0a347fc19f8acbfaec29ec39b31a" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.006894 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.010193 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.017684 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.084423 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.084726 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.084947 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvnj4\" (UniqueName: \"kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.186458 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.186604 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.186673 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvnj4\" (UniqueName: \"kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.187355 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.187405 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.223092 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wvnj4\" (UniqueName: \"kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4\") pod \"community-operators-kv26l\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.342204 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:42 crc kubenswrapper[4661]: I1001 06:14:42.896307 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:42 crc kubenswrapper[4661]: W1001 06:14:42.897104 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbade17ee_ade5_4826_b56e_bef36a0a53f7.slice/crio-d57676ddf0226513bddf2cd52e8c33f69799fbd5fc89448355f8779b9b42168e WatchSource:0}: Error finding container d57676ddf0226513bddf2cd52e8c33f69799fbd5fc89448355f8779b9b42168e: Status 404 returned error can't find the container with id d57676ddf0226513bddf2cd52e8c33f69799fbd5fc89448355f8779b9b42168e Oct 01 06:14:43 crc kubenswrapper[4661]: I1001 06:14:43.171373 4661 generic.go:334] "Generic (PLEG): container finished" podID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerID="dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e" exitCode=0 Oct 01 06:14:43 crc kubenswrapper[4661]: I1001 06:14:43.171447 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerDied","Data":"dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e"} Oct 01 06:14:43 crc kubenswrapper[4661]: I1001 06:14:43.171875 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerStarted","Data":"d57676ddf0226513bddf2cd52e8c33f69799fbd5fc89448355f8779b9b42168e"} Oct 01 06:14:44 crc kubenswrapper[4661]: I1001 06:14:44.185114 4661 generic.go:334] "Generic (PLEG): container finished" podID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerID="b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab" exitCode=0 Oct 01 06:14:44 crc kubenswrapper[4661]: I1001 06:14:44.185323 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerDied","Data":"b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab"} Oct 01 06:14:45 crc kubenswrapper[4661]: I1001 06:14:45.217386 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerStarted","Data":"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae"} Oct 01 06:14:45 crc kubenswrapper[4661]: I1001 06:14:45.249065 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kv26l" podStartSLOduration=2.817739198 podStartE2EDuration="4.249035915s" podCreationTimestamp="2025-10-01 06:14:41 +0000 UTC" firstStartedPulling="2025-10-01 06:14:43.173222934 +0000 UTC m=+2732.111201548" lastFinishedPulling="2025-10-01 06:14:44.604519631 +0000 UTC m=+2733.542498265" observedRunningTime="2025-10-01 06:14:45.246137106 +0000 UTC 
m=+2734.184115730" watchObservedRunningTime="2025-10-01 06:14:45.249035915 +0000 UTC m=+2734.187014559" Oct 01 06:14:52 crc kubenswrapper[4661]: I1001 06:14:52.343187 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:52 crc kubenswrapper[4661]: I1001 06:14:52.344401 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:52 crc kubenswrapper[4661]: I1001 06:14:52.435976 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:53 crc kubenswrapper[4661]: I1001 06:14:53.374481 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:53 crc kubenswrapper[4661]: I1001 06:14:53.447810 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.330289 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kv26l" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="registry-server" containerID="cri-o://00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae" gracePeriod=2 Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.850092 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.962355 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvnj4\" (UniqueName: \"kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4\") pod \"bade17ee-ade5-4826-b56e-bef36a0a53f7\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.962415 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content\") pod \"bade17ee-ade5-4826-b56e-bef36a0a53f7\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.962439 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities\") pod \"bade17ee-ade5-4826-b56e-bef36a0a53f7\" (UID: \"bade17ee-ade5-4826-b56e-bef36a0a53f7\") " Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.963669 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities" (OuterVolumeSpecName: "utilities") pod "bade17ee-ade5-4826-b56e-bef36a0a53f7" (UID: "bade17ee-ade5-4826-b56e-bef36a0a53f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:14:55 crc kubenswrapper[4661]: I1001 06:14:55.973167 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4" (OuterVolumeSpecName: "kube-api-access-wvnj4") pod "bade17ee-ade5-4826-b56e-bef36a0a53f7" (UID: "bade17ee-ade5-4826-b56e-bef36a0a53f7"). InnerVolumeSpecName "kube-api-access-wvnj4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.034756 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bade17ee-ade5-4826-b56e-bef36a0a53f7" (UID: "bade17ee-ade5-4826-b56e-bef36a0a53f7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.065499 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvnj4\" (UniqueName: \"kubernetes.io/projected/bade17ee-ade5-4826-b56e-bef36a0a53f7-kube-api-access-wvnj4\") on node \"crc\" DevicePath \"\"" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.065555 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.065579 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bade17ee-ade5-4826-b56e-bef36a0a53f7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.346254 4661 generic.go:334] "Generic (PLEG): container finished" podID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerID="00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae" exitCode=0 Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.346341 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerDied","Data":"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae"} Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.346790 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kv26l" event={"ID":"bade17ee-ade5-4826-b56e-bef36a0a53f7","Type":"ContainerDied","Data":"d57676ddf0226513bddf2cd52e8c33f69799fbd5fc89448355f8779b9b42168e"} Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.346832 4661 scope.go:117] "RemoveContainer" containerID="00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.347068 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kv26l" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.396680 4661 scope.go:117] "RemoveContainer" containerID="b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.428611 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.441372 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kv26l"] Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.447314 4661 scope.go:117] "RemoveContainer" containerID="dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.497527 4661 scope.go:117] "RemoveContainer" containerID="00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae" Oct 01 06:14:56 crc kubenswrapper[4661]: E1001 06:14:56.498047 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae\": container with ID starting with 00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae not found: ID does not exist" containerID="00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.498085 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae"} err="failed to get container status \"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae\": rpc error: code = NotFound desc = could not find container \"00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae\": container with ID starting with 00593419616cf57b81a7ff0596ba7c0873e4bf3d196fcee7d3bba45d13ed26ae not found: ID does not exist" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.498117 4661 scope.go:117] "RemoveContainer" containerID="b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab" Oct 01 06:14:56 crc kubenswrapper[4661]: E1001 06:14:56.498366 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab\": container with ID starting with b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab not found: ID does not exist" containerID="b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.498395 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab"} err="failed to get container status \"b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab\": rpc error: code = NotFound desc = could not find container \"b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab\": container with ID starting with b413e568b5d2fbdf98d8f97cc23391eb57a4ea90c006dbb661354d902d9d09ab not found: ID does not exist" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.498420 4661 scope.go:117] "RemoveContainer" containerID="dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e" Oct 01 06:14:56 crc kubenswrapper[4661]: E1001 06:14:56.498620 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e\": container with ID starting with dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e not found: ID does not exist" containerID="dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e" Oct 01 06:14:56 crc kubenswrapper[4661]: I1001 06:14:56.498688 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e"} err="failed to get container status \"dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e\": rpc error: code = NotFound desc = could not find container \"dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e\": container with ID starting with dfd21d258ba09c5e3050d423fd4ba2df2e2c59b7d53a9381adf720a90405c66e not found: ID does not exist" Oct 01 06:14:57 crc kubenswrapper[4661]: I1001 06:14:57.775086 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" path="/var/lib/kubelet/pods/bade17ee-ade5-4826-b56e-bef36a0a53f7/volumes" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.154234 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5"] Oct 01 06:15:00 crc kubenswrapper[4661]: E1001 06:15:00.154971 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="registry-server" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.154987 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="registry-server" Oct 01 06:15:00 crc kubenswrapper[4661]: E1001 06:15:00.155023 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="extract-content" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.155031 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="extract-content" Oct 01 06:15:00 crc kubenswrapper[4661]: E1001 06:15:00.155066 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="extract-utilities" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.155075 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="extract-utilities" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.155305 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bade17ee-ade5-4826-b56e-bef36a0a53f7" containerName="registry-server" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.156164 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.159253 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.168842 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.179502 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5"] Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.282219 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sm8w\" (UniqueName: \"kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.282511 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.283355 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.385349 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.385512 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sm8w\" (UniqueName: \"kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.385727 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.388288 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume\") pod 
\"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.396894 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.416763 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sm8w\" (UniqueName: \"kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w\") pod \"collect-profiles-29321655-xdvw5\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.486496 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:00 crc kubenswrapper[4661]: I1001 06:15:00.817184 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5"] Oct 01 06:15:01 crc kubenswrapper[4661]: I1001 06:15:01.412789 4661 generic.go:334] "Generic (PLEG): container finished" podID="768189cb-583f-4e7f-b9e3-4f883491857a" containerID="38601fdc47b9d0d435af274d1aebaa2138d8733ff3d433ee91b52f8bf6489988" exitCode=0 Oct 01 06:15:01 crc kubenswrapper[4661]: I1001 06:15:01.412992 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" event={"ID":"768189cb-583f-4e7f-b9e3-4f883491857a","Type":"ContainerDied","Data":"38601fdc47b9d0d435af274d1aebaa2138d8733ff3d433ee91b52f8bf6489988"} Oct 01 06:15:01 crc kubenswrapper[4661]: I1001 06:15:01.413169 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" event={"ID":"768189cb-583f-4e7f-b9e3-4f883491857a","Type":"ContainerStarted","Data":"de0a23b8ca8f08ffeaa009b16980227616575bb2157a2d469d1db48411ecc37a"} Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.828602 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.873554 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume\") pod \"768189cb-583f-4e7f-b9e3-4f883491857a\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.873656 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sm8w\" (UniqueName: \"kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w\") pod \"768189cb-583f-4e7f-b9e3-4f883491857a\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.873727 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume\") pod \"768189cb-583f-4e7f-b9e3-4f883491857a\" (UID: \"768189cb-583f-4e7f-b9e3-4f883491857a\") " Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.875583 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume" (OuterVolumeSpecName: "config-volume") pod "768189cb-583f-4e7f-b9e3-4f883491857a" (UID: "768189cb-583f-4e7f-b9e3-4f883491857a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.884509 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "768189cb-583f-4e7f-b9e3-4f883491857a" (UID: "768189cb-583f-4e7f-b9e3-4f883491857a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.891924 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w" (OuterVolumeSpecName: "kube-api-access-8sm8w") pod "768189cb-583f-4e7f-b9e3-4f883491857a" (UID: "768189cb-583f-4e7f-b9e3-4f883491857a"). InnerVolumeSpecName "kube-api-access-8sm8w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.977159 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/768189cb-583f-4e7f-b9e3-4f883491857a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.977205 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sm8w\" (UniqueName: \"kubernetes.io/projected/768189cb-583f-4e7f-b9e3-4f883491857a-kube-api-access-8sm8w\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:02 crc kubenswrapper[4661]: I1001 06:15:02.977226 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/768189cb-583f-4e7f-b9e3-4f883491857a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:03 crc kubenswrapper[4661]: I1001 06:15:03.440170 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" event={"ID":"768189cb-583f-4e7f-b9e3-4f883491857a","Type":"ContainerDied","Data":"de0a23b8ca8f08ffeaa009b16980227616575bb2157a2d469d1db48411ecc37a"} Oct 01 06:15:03 crc kubenswrapper[4661]: I1001 06:15:03.440228 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de0a23b8ca8f08ffeaa009b16980227616575bb2157a2d469d1db48411ecc37a" Oct 01 06:15:03 crc kubenswrapper[4661]: I1001 06:15:03.440257 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5" Oct 01 06:15:03 crc kubenswrapper[4661]: I1001 06:15:03.927573 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"] Oct 01 06:15:03 crc kubenswrapper[4661]: I1001 06:15:03.943685 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321610-wjxrz"] Oct 01 06:15:05 crc kubenswrapper[4661]: I1001 06:15:05.779416 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a731ee04-6aba-49d9-b8b0-392d31d55da2" path="/var/lib/kubelet/pods/a731ee04-6aba-49d9-b8b0-392d31d55da2/volumes" Oct 01 06:15:22 crc kubenswrapper[4661]: I1001 06:15:22.622713 4661 scope.go:117] "RemoveContainer" containerID="33db51cd837328066456c67b6fbda93fe880a51f254777aec2aaa59817582f10" Oct 01 06:15:35 crc kubenswrapper[4661]: E1001 06:15:35.593089 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e4df47f_3fbf_4a44_89d7_fd97b1fef95f.slice/crio-conmon-3b721abaa6c9ad47ea555e564de00a3714e611623f2c217ab1dfd07d018da8f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e4df47f_3fbf_4a44_89d7_fd97b1fef95f.slice/crio-3b721abaa6c9ad47ea555e564de00a3714e611623f2c217ab1dfd07d018da8f3.scope\": RecentStats: unable to find data in memory cache]" Oct 01 06:15:35 crc kubenswrapper[4661]: I1001 06:15:35.833239 4661 generic.go:334] "Generic (PLEG): container finished" podID="2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" containerID="3b721abaa6c9ad47ea555e564de00a3714e611623f2c217ab1dfd07d018da8f3" exitCode=0 Oct 01 06:15:35 crc kubenswrapper[4661]: I1001 06:15:35.833310 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" event={"ID":"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f","Type":"ContainerDied","Data":"3b721abaa6c9ad47ea555e564de00a3714e611623f2c217ab1dfd07d018da8f3"} Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.325601 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.372147 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.372551 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.372926 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.373124 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfmg8\" (UniqueName: \"kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.373380 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.374023 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.374225 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.374456 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.374715 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key\") pod \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\" (UID: \"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f\") " Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.380533 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8" (OuterVolumeSpecName: "kube-api-access-qfmg8") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "kube-api-access-qfmg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.381781 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.403480 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.418977 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory" (OuterVolumeSpecName: "inventory") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.419341 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.419628 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.435916 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.442571 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.442563 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" (UID: "2e4df47f-3fbf-4a44-89d7-fd97b1fef95f"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477104 4661 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477264 4661 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477382 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfmg8\" (UniqueName: \"kubernetes.io/projected/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-kube-api-access-qfmg8\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477479 4661 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477559 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477704 4661 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477786 4661 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477860 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.477941 4661 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2e4df47f-3fbf-4a44-89d7-fd97b1fef95f-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.862726 4661 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" event={"ID":"2e4df47f-3fbf-4a44-89d7-fd97b1fef95f","Type":"ContainerDied","Data":"18584037f8bdbdf89dd7ffa514c31e7231b894739473ca65526714cc9b659985"} Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.862791 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18584037f8bdbdf89dd7ffa514c31e7231b894739473ca65526714cc9b659985" Oct 01 06:15:37 crc kubenswrapper[4661]: I1001 06:15:37.862980 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-2xtbm" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.001370 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs"] Oct 01 06:15:38 crc kubenswrapper[4661]: E1001 06:15:38.002012 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.002037 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 06:15:38 crc kubenswrapper[4661]: E1001 06:15:38.002051 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768189cb-583f-4e7f-b9e3-4f883491857a" containerName="collect-profiles" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.002057 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="768189cb-583f-4e7f-b9e3-4f883491857a" containerName="collect-profiles" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.002266 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="768189cb-583f-4e7f-b9e3-4f883491857a" containerName="collect-profiles" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.002285 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4df47f-3fbf-4a44-89d7-fd97b1fef95f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.002959 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.006180 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.006474 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.006696 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-srk7f" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.006891 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.008581 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.055680 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs"] Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095076 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095144 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095467 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095562 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095706 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 
01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095853 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.095911 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gqqj\" (UniqueName: \"kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197380 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197435 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197498 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197524 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197557 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197609 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.197657 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gqqj\" (UniqueName: \"kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.201367 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.201760 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.202625 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.203081 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.203392 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.205092 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.214837 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gqqj\" (UniqueName: \"kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:38 crc kubenswrapper[4661]: I1001 06:15:38.339014 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:15:39 crc kubenswrapper[4661]: I1001 06:15:39.005545 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs"] Oct 01 06:15:39 crc kubenswrapper[4661]: W1001 06:15:39.020962 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2078d83_8d53_4052_8b77_031948bc8705.slice/crio-0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152 WatchSource:0}: Error finding container 0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152: Status 404 returned error can't find the container with id 0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152 Oct 01 06:15:39 crc kubenswrapper[4661]: I1001 06:15:39.894157 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" event={"ID":"e2078d83-8d53-4052-8b77-031948bc8705","Type":"ContainerStarted","Data":"0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152"} Oct 01 06:15:40 crc kubenswrapper[4661]: I1001 06:15:40.914087 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" event={"ID":"e2078d83-8d53-4052-8b77-031948bc8705","Type":"ContainerStarted","Data":"23cef719f65ef2bd0b0f164ac093d603f9e00d25656b85df13b8daaaa6ff679c"} Oct 01 06:15:40 crc kubenswrapper[4661]: I1001 06:15:40.944662 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" podStartSLOduration=3.356536266 podStartE2EDuration="3.944624074s" podCreationTimestamp="2025-10-01 06:15:37 +0000 UTC" firstStartedPulling="2025-10-01 06:15:39.026856095 +0000 UTC m=+2787.964834719" lastFinishedPulling="2025-10-01 06:15:39.614943873 +0000 UTC m=+2788.552922527" observedRunningTime="2025-10-01 06:15:40.937294414 +0000 UTC m=+2789.875273068" watchObservedRunningTime="2025-10-01 06:15:40.944624074 +0000 UTC m=+2789.882602688" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.273284 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.278520 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.289202 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.422311 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6b7k\" (UniqueName: \"kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.422368 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.422403 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.524417 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6b7k\" (UniqueName: \"kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.524485 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.524516 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.525029 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.525193 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.546807 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p6b7k\" (UniqueName: \"kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k\") pod \"redhat-operators-8zd25\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:24 crc kubenswrapper[4661]: I1001 06:16:24.615182 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:25 crc kubenswrapper[4661]: I1001 06:16:25.127061 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:25 crc kubenswrapper[4661]: I1001 06:16:25.476045 4661 generic.go:334] "Generic (PLEG): container finished" podID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerID="784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1" exitCode=0 Oct 01 06:16:25 crc kubenswrapper[4661]: I1001 06:16:25.476095 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerDied","Data":"784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1"} Oct 01 06:16:25 crc kubenswrapper[4661]: I1001 06:16:25.476132 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerStarted","Data":"1f37dbb4b8bbb945f0e5b386077c32c8b29ef84f0c46a66b6478ceb3cae464da"} Oct 01 06:16:27 crc kubenswrapper[4661]: I1001 06:16:27.503009 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerStarted","Data":"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3"} Oct 01 06:16:30 crc kubenswrapper[4661]: I1001 06:16:30.552535 4661 generic.go:334] "Generic (PLEG): container finished" podID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerID="7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3" exitCode=0 Oct 01 06:16:30 crc kubenswrapper[4661]: I1001 06:16:30.552683 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerDied","Data":"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3"} Oct 01 06:16:30 crc kubenswrapper[4661]: I1001 06:16:30.559439 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:16:31 crc kubenswrapper[4661]: I1001 06:16:31.607305 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerStarted","Data":"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd"} Oct 01 06:16:31 crc kubenswrapper[4661]: I1001 06:16:31.631389 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8zd25" podStartSLOduration=1.869427768 podStartE2EDuration="7.631368279s" podCreationTimestamp="2025-10-01 06:16:24 +0000 UTC" firstStartedPulling="2025-10-01 06:16:25.478532739 +0000 UTC m=+2834.416511353" lastFinishedPulling="2025-10-01 06:16:31.24047324 +0000 UTC m=+2840.178451864" observedRunningTime="2025-10-01 06:16:31.6277308 +0000 UTC m=+2840.565709424" watchObservedRunningTime="2025-10-01 06:16:31.631368279 +0000 UTC m=+2840.569346903" Oct 01 06:16:34 crc 
kubenswrapper[4661]: I1001 06:16:34.310061 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:16:34 crc kubenswrapper[4661]: I1001 06:16:34.310650 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:16:34 crc kubenswrapper[4661]: I1001 06:16:34.616578 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:34 crc kubenswrapper[4661]: I1001 06:16:34.616701 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:35 crc kubenswrapper[4661]: I1001 06:16:35.664432 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8zd25" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="registry-server" probeResult="failure" output=< Oct 01 06:16:35 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 06:16:35 crc kubenswrapper[4661]: > Oct 01 06:16:44 crc kubenswrapper[4661]: I1001 06:16:44.704616 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:44 crc kubenswrapper[4661]: I1001 06:16:44.777770 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:44 crc kubenswrapper[4661]: I1001 06:16:44.953860 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:45 crc kubenswrapper[4661]: I1001 06:16:45.751465 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8zd25" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="registry-server" containerID="cri-o://4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd" gracePeriod=2 Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.266040 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.406145 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content\") pod \"1716b40b-a4a1-4374-b013-d7dc8cadf424\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.406237 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities\") pod \"1716b40b-a4a1-4374-b013-d7dc8cadf424\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.406385 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6b7k\" (UniqueName: \"kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k\") pod \"1716b40b-a4a1-4374-b013-d7dc8cadf424\" (UID: \"1716b40b-a4a1-4374-b013-d7dc8cadf424\") " Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.407602 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities" (OuterVolumeSpecName: "utilities") pod "1716b40b-a4a1-4374-b013-d7dc8cadf424" (UID: "1716b40b-a4a1-4374-b013-d7dc8cadf424"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.424964 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k" (OuterVolumeSpecName: "kube-api-access-p6b7k") pod "1716b40b-a4a1-4374-b013-d7dc8cadf424" (UID: "1716b40b-a4a1-4374-b013-d7dc8cadf424"). InnerVolumeSpecName "kube-api-access-p6b7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.508480 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.508509 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6b7k\" (UniqueName: \"kubernetes.io/projected/1716b40b-a4a1-4374-b013-d7dc8cadf424-kube-api-access-p6b7k\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.509176 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1716b40b-a4a1-4374-b013-d7dc8cadf424" (UID: "1716b40b-a4a1-4374-b013-d7dc8cadf424"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.610753 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1716b40b-a4a1-4374-b013-d7dc8cadf424-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.765591 4661 generic.go:334] "Generic (PLEG): container finished" podID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerID="4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd" exitCode=0 Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.765678 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerDied","Data":"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd"} Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.765729 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8zd25" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.765754 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8zd25" event={"ID":"1716b40b-a4a1-4374-b013-d7dc8cadf424","Type":"ContainerDied","Data":"1f37dbb4b8bbb945f0e5b386077c32c8b29ef84f0c46a66b6478ceb3cae464da"} Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.765781 4661 scope.go:117] "RemoveContainer" containerID="4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.809772 4661 scope.go:117] "RemoveContainer" containerID="7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.814208 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.825907 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8zd25"] Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.863246 4661 scope.go:117] "RemoveContainer" containerID="784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.896626 4661 scope.go:117] "RemoveContainer" containerID="4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd" Oct 01 06:16:46 crc kubenswrapper[4661]: E1001 06:16:46.897357 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd\": container with ID starting with 4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd not found: ID does not exist" containerID="4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.897395 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd"} err="failed to get container status \"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd\": rpc error: code = NotFound desc = could not find container \"4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd\": container with ID starting with 4b114aef83742912d6375b1a9a3cfa4a309a3b785027877a674524dfba19dbbd not found: ID does not exist" Oct 01 06:16:46 crc 
kubenswrapper[4661]: I1001 06:16:46.897419 4661 scope.go:117] "RemoveContainer" containerID="7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3" Oct 01 06:16:46 crc kubenswrapper[4661]: E1001 06:16:46.899061 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3\": container with ID starting with 7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3 not found: ID does not exist" containerID="7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.899107 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3"} err="failed to get container status \"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3\": rpc error: code = NotFound desc = could not find container \"7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3\": container with ID starting with 7e11b7d30687cbda28fdf7f1cd4dfd10871d1c83b141b331ac094e220dc002d3 not found: ID does not exist" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.899138 4661 scope.go:117] "RemoveContainer" containerID="784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1" Oct 01 06:16:46 crc kubenswrapper[4661]: E1001 06:16:46.899894 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1\": container with ID starting with 784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1 not found: ID does not exist" containerID="784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1" Oct 01 06:16:46 crc kubenswrapper[4661]: I1001 06:16:46.899924 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1"} err="failed to get container status \"784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1\": rpc error: code = NotFound desc = could not find container \"784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1\": container with ID starting with 784d7452d314079b91c14b82e5721a0362fce4295101cac4ceb70cf2e0a8abd1 not found: ID does not exist" Oct 01 06:16:47 crc kubenswrapper[4661]: I1001 06:16:47.777164 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" path="/var/lib/kubelet/pods/1716b40b-a4a1-4374-b013-d7dc8cadf424/volumes" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.605392 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:16:55 crc kubenswrapper[4661]: E1001 06:16:55.607054 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="registry-server" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.607075 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="registry-server" Oct 01 06:16:55 crc kubenswrapper[4661]: E1001 06:16:55.607110 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="extract-content" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.607117 4661 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="extract-content" Oct 01 06:16:55 crc kubenswrapper[4661]: E1001 06:16:55.607178 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="extract-utilities" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.607185 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="extract-utilities" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.607412 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="1716b40b-a4a1-4374-b013-d7dc8cadf424" containerName="registry-server" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.609019 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.626017 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.704197 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nm4x\" (UniqueName: \"kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.704250 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.704280 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.806196 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.806765 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.807882 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nm4x\" (UniqueName: \"kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 
crc kubenswrapper[4661]: I1001 06:16:55.807947 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.808302 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.833831 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nm4x\" (UniqueName: \"kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x\") pod \"certified-operators-dvd77\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:55 crc kubenswrapper[4661]: I1001 06:16:55.927255 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:16:56 crc kubenswrapper[4661]: I1001 06:16:56.458851 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:16:56 crc kubenswrapper[4661]: I1001 06:16:56.909019 4661 generic.go:334] "Generic (PLEG): container finished" podID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerID="aff6b891b064cdb4c5da37aacf37d5e0336d7aeca002546474ff67df3b120fac" exitCode=0 Oct 01 06:16:56 crc kubenswrapper[4661]: I1001 06:16:56.909158 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerDied","Data":"aff6b891b064cdb4c5da37aacf37d5e0336d7aeca002546474ff67df3b120fac"} Oct 01 06:16:56 crc kubenswrapper[4661]: I1001 06:16:56.909372 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerStarted","Data":"c8832530b76f273d9e710d1b2d42d2a76e637c236f24c4a7e26129d84d083683"} Oct 01 06:16:58 crc kubenswrapper[4661]: I1001 06:16:58.953423 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerStarted","Data":"cf52acb8e5aa7240385e10e66ef3983afe3590c502a09abf130a96af512e8036"} Oct 01 06:16:59 crc kubenswrapper[4661]: I1001 06:16:59.962908 4661 generic.go:334] "Generic (PLEG): container finished" podID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerID="cf52acb8e5aa7240385e10e66ef3983afe3590c502a09abf130a96af512e8036" exitCode=0 Oct 01 06:16:59 crc kubenswrapper[4661]: I1001 06:16:59.963017 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerDied","Data":"cf52acb8e5aa7240385e10e66ef3983afe3590c502a09abf130a96af512e8036"} Oct 01 06:17:00 crc kubenswrapper[4661]: I1001 06:17:00.981255 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" 
event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerStarted","Data":"3ed8b93c16debf762db124d88a56d3a6cf2082b67342791136d7664afd9a0525"} Oct 01 06:17:01 crc kubenswrapper[4661]: I1001 06:17:01.013233 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dvd77" podStartSLOduration=2.564393242 podStartE2EDuration="6.013211013s" podCreationTimestamp="2025-10-01 06:16:55 +0000 UTC" firstStartedPulling="2025-10-01 06:16:56.916433481 +0000 UTC m=+2865.854412135" lastFinishedPulling="2025-10-01 06:17:00.365251282 +0000 UTC m=+2869.303229906" observedRunningTime="2025-10-01 06:17:01.003089086 +0000 UTC m=+2869.941067700" watchObservedRunningTime="2025-10-01 06:17:01.013211013 +0000 UTC m=+2869.951189637" Oct 01 06:17:04 crc kubenswrapper[4661]: I1001 06:17:04.309797 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:17:04 crc kubenswrapper[4661]: I1001 06:17:04.311204 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.562075 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.566828 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.579857 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.626199 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.626570 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md92z\" (UniqueName: \"kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.627470 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.729653 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.730091 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md92z\" (UniqueName: \"kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.730322 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.730153 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.730755 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.757015 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-md92z\" (UniqueName: \"kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z\") pod \"redhat-marketplace-x6vxq\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.893964 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.927787 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:05 crc kubenswrapper[4661]: I1001 06:17:05.927900 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:06 crc kubenswrapper[4661]: I1001 06:17:06.020608 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:06 crc kubenswrapper[4661]: I1001 06:17:06.115691 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:06 crc kubenswrapper[4661]: I1001 06:17:06.400678 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:07 crc kubenswrapper[4661]: I1001 06:17:07.061222 4661 generic.go:334] "Generic (PLEG): container finished" podID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerID="378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d" exitCode=0 Oct 01 06:17:07 crc kubenswrapper[4661]: I1001 06:17:07.061303 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerDied","Data":"378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d"} Oct 01 06:17:07 crc kubenswrapper[4661]: I1001 06:17:07.061778 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerStarted","Data":"a8df8cce903e2938a7c5936add0ce6dbf8329ffc8f85ca96c1ad6c22cdd9b95a"} Oct 01 06:17:08 crc kubenswrapper[4661]: I1001 06:17:08.076725 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerStarted","Data":"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a"} Oct 01 06:17:08 crc kubenswrapper[4661]: I1001 06:17:08.329486 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:17:09 crc kubenswrapper[4661]: I1001 06:17:09.091594 4661 generic.go:334] "Generic (PLEG): container finished" podID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerID="322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a" exitCode=0 Oct 01 06:17:09 crc kubenswrapper[4661]: I1001 06:17:09.091719 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerDied","Data":"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a"} Oct 01 06:17:09 crc kubenswrapper[4661]: I1001 06:17:09.092241 4661 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-dvd77" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="registry-server" containerID="cri-o://3ed8b93c16debf762db124d88a56d3a6cf2082b67342791136d7664afd9a0525" gracePeriod=2 Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.105265 4661 generic.go:334] "Generic (PLEG): container finished" podID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerID="3ed8b93c16debf762db124d88a56d3a6cf2082b67342791136d7664afd9a0525" exitCode=0 Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.105356 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerDied","Data":"3ed8b93c16debf762db124d88a56d3a6cf2082b67342791136d7664afd9a0525"} Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.201307 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.276499 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nm4x\" (UniqueName: \"kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x\") pod \"05da173d-e38a-42e7-8dfd-0b7acb56f065\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.276659 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities\") pod \"05da173d-e38a-42e7-8dfd-0b7acb56f065\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.276717 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content\") pod \"05da173d-e38a-42e7-8dfd-0b7acb56f065\" (UID: \"05da173d-e38a-42e7-8dfd-0b7acb56f065\") " Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.283046 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities" (OuterVolumeSpecName: "utilities") pod "05da173d-e38a-42e7-8dfd-0b7acb56f065" (UID: "05da173d-e38a-42e7-8dfd-0b7acb56f065"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.289613 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x" (OuterVolumeSpecName: "kube-api-access-6nm4x") pod "05da173d-e38a-42e7-8dfd-0b7acb56f065" (UID: "05da173d-e38a-42e7-8dfd-0b7acb56f065"). InnerVolumeSpecName "kube-api-access-6nm4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.333585 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05da173d-e38a-42e7-8dfd-0b7acb56f065" (UID: "05da173d-e38a-42e7-8dfd-0b7acb56f065"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.381075 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nm4x\" (UniqueName: \"kubernetes.io/projected/05da173d-e38a-42e7-8dfd-0b7acb56f065-kube-api-access-6nm4x\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.381119 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:10 crc kubenswrapper[4661]: I1001 06:17:10.381188 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05da173d-e38a-42e7-8dfd-0b7acb56f065-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.117040 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvd77" event={"ID":"05da173d-e38a-42e7-8dfd-0b7acb56f065","Type":"ContainerDied","Data":"c8832530b76f273d9e710d1b2d42d2a76e637c236f24c4a7e26129d84d083683"} Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.117385 4661 scope.go:117] "RemoveContainer" containerID="3ed8b93c16debf762db124d88a56d3a6cf2082b67342791136d7664afd9a0525" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.117135 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvd77" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.119748 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerStarted","Data":"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d"} Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.141393 4661 scope.go:117] "RemoveContainer" containerID="cf52acb8e5aa7240385e10e66ef3983afe3590c502a09abf130a96af512e8036" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.145341 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x6vxq" podStartSLOduration=3.005812634 podStartE2EDuration="6.14532469s" podCreationTimestamp="2025-10-01 06:17:05 +0000 UTC" firstStartedPulling="2025-10-01 06:17:07.065691296 +0000 UTC m=+2876.003669920" lastFinishedPulling="2025-10-01 06:17:10.205203352 +0000 UTC m=+2879.143181976" observedRunningTime="2025-10-01 06:17:11.142313988 +0000 UTC m=+2880.080292602" watchObservedRunningTime="2025-10-01 06:17:11.14532469 +0000 UTC m=+2880.083303314" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.170294 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.177227 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dvd77"] Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.187239 4661 scope.go:117] "RemoveContainer" containerID="aff6b891b064cdb4c5da37aacf37d5e0336d7aeca002546474ff67df3b120fac" Oct 01 06:17:11 crc kubenswrapper[4661]: I1001 06:17:11.777826 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" path="/var/lib/kubelet/pods/05da173d-e38a-42e7-8dfd-0b7acb56f065/volumes" Oct 01 06:17:15 crc kubenswrapper[4661]: I1001 06:17:15.895080 4661 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:15 crc kubenswrapper[4661]: I1001 06:17:15.895657 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:15 crc kubenswrapper[4661]: I1001 06:17:15.962077 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:16 crc kubenswrapper[4661]: I1001 06:17:16.272266 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:16 crc kubenswrapper[4661]: I1001 06:17:16.321953 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.209254 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x6vxq" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="registry-server" containerID="cri-o://cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d" gracePeriod=2 Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.700414 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.856467 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md92z\" (UniqueName: \"kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z\") pod \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.856794 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities\") pod \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.856915 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content\") pod \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\" (UID: \"2ee01ad9-84bb-4ec7-a002-d04e59632e7c\") " Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.857784 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities" (OuterVolumeSpecName: "utilities") pod "2ee01ad9-84bb-4ec7-a002-d04e59632e7c" (UID: "2ee01ad9-84bb-4ec7-a002-d04e59632e7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.866822 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z" (OuterVolumeSpecName: "kube-api-access-md92z") pod "2ee01ad9-84bb-4ec7-a002-d04e59632e7c" (UID: "2ee01ad9-84bb-4ec7-a002-d04e59632e7c"). InnerVolumeSpecName "kube-api-access-md92z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.878968 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ee01ad9-84bb-4ec7-a002-d04e59632e7c" (UID: "2ee01ad9-84bb-4ec7-a002-d04e59632e7c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.959495 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md92z\" (UniqueName: \"kubernetes.io/projected/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-kube-api-access-md92z\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.959556 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:18 crc kubenswrapper[4661]: I1001 06:17:18.959580 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ee01ad9-84bb-4ec7-a002-d04e59632e7c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.225460 4661 generic.go:334] "Generic (PLEG): container finished" podID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerID="cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d" exitCode=0 Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.225508 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerDied","Data":"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d"} Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.225545 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6vxq" event={"ID":"2ee01ad9-84bb-4ec7-a002-d04e59632e7c","Type":"ContainerDied","Data":"a8df8cce903e2938a7c5936add0ce6dbf8329ffc8f85ca96c1ad6c22cdd9b95a"} Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.225569 4661 scope.go:117] "RemoveContainer" containerID="cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.225596 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6vxq" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.258061 4661 scope.go:117] "RemoveContainer" containerID="322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.275843 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.288539 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6vxq"] Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.296701 4661 scope.go:117] "RemoveContainer" containerID="378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.384085 4661 scope.go:117] "RemoveContainer" containerID="cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d" Oct 01 06:17:19 crc kubenswrapper[4661]: E1001 06:17:19.385535 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d\": container with ID starting with cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d not found: ID does not exist" containerID="cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.385567 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d"} err="failed to get container status \"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d\": rpc error: code = NotFound desc = could not find container \"cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d\": container with ID starting with cd7a100270d3484e33fc3db80dcdac267bfe8be6292656fe7e4f77fb19d7bf3d not found: ID does not exist" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.385587 4661 scope.go:117] "RemoveContainer" containerID="322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a" Oct 01 06:17:19 crc kubenswrapper[4661]: E1001 06:17:19.387792 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a\": container with ID starting with 322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a not found: ID does not exist" containerID="322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.387818 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a"} err="failed to get container status \"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a\": rpc error: code = NotFound desc = could not find container \"322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a\": container with ID starting with 322b494428cb0a44cdf4885d57ec3deaceccf5086d0540722cf74a2e90a5332a not found: ID does not exist" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.387834 4661 scope.go:117] "RemoveContainer" containerID="378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d" Oct 01 06:17:19 crc kubenswrapper[4661]: E1001 06:17:19.391718 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d\": container with ID starting with 378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d not found: ID does not exist" containerID="378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.391748 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d"} err="failed to get container status \"378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d\": rpc error: code = NotFound desc = could not find container \"378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d\": container with ID starting with 378f5a31d75ca663d81934cc294b659248be76f3a477b5aa4383ae2f6099982d not found: ID does not exist" Oct 01 06:17:19 crc kubenswrapper[4661]: I1001 06:17:19.775004 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" path="/var/lib/kubelet/pods/2ee01ad9-84bb-4ec7-a002-d04e59632e7c/volumes" Oct 01 06:17:34 crc kubenswrapper[4661]: I1001 06:17:34.308984 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:17:34 crc kubenswrapper[4661]: I1001 06:17:34.309774 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:17:34 crc kubenswrapper[4661]: I1001 06:17:34.309846 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:17:34 crc kubenswrapper[4661]: I1001 06:17:34.310975 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:17:34 crc kubenswrapper[4661]: I1001 06:17:34.311098 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" gracePeriod=600 Oct 01 06:17:34 crc kubenswrapper[4661]: E1001 06:17:34.457422 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:17:35 crc kubenswrapper[4661]: I1001 06:17:35.433355 4661 generic.go:334] 
"Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" exitCode=0 Oct 01 06:17:35 crc kubenswrapper[4661]: I1001 06:17:35.433384 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"} Oct 01 06:17:35 crc kubenswrapper[4661]: I1001 06:17:35.433761 4661 scope.go:117] "RemoveContainer" containerID="55135fa47d8d7e2b525c51e0ba1348ccb829754a9a8e2b8714e1ffd134bbfde4" Oct 01 06:17:35 crc kubenswrapper[4661]: I1001 06:17:35.434537 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:17:35 crc kubenswrapper[4661]: E1001 06:17:35.434981 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:17:46 crc kubenswrapper[4661]: I1001 06:17:46.758269 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:17:46 crc kubenswrapper[4661]: E1001 06:17:46.759738 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:18:00 crc kubenswrapper[4661]: I1001 06:18:00.757340 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:18:00 crc kubenswrapper[4661]: E1001 06:18:00.758630 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:18:12 crc kubenswrapper[4661]: I1001 06:18:12.757192 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:18:12 crc kubenswrapper[4661]: E1001 06:18:12.758486 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:18:26 crc kubenswrapper[4661]: I1001 06:18:26.071503 4661 generic.go:334] "Generic (PLEG): container finished" podID="e2078d83-8d53-4052-8b77-031948bc8705" 
containerID="23cef719f65ef2bd0b0f164ac093d603f9e00d25656b85df13b8daaaa6ff679c" exitCode=0 Oct 01 06:18:26 crc kubenswrapper[4661]: I1001 06:18:26.071609 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" event={"ID":"e2078d83-8d53-4052-8b77-031948bc8705","Type":"ContainerDied","Data":"23cef719f65ef2bd0b0f164ac093d603f9e00d25656b85df13b8daaaa6ff679c"} Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.634936 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.638958 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639130 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639280 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gqqj\" (UniqueName: \"kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639346 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639504 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639560 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.639816 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory\") pod \"e2078d83-8d53-4052-8b77-031948bc8705\" (UID: \"e2078d83-8d53-4052-8b77-031948bc8705\") " Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.646372 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e2078d83-8d53-4052-8b77-031948bc8705" 
(UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.651187 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj" (OuterVolumeSpecName: "kube-api-access-4gqqj") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "kube-api-access-4gqqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.672100 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.706753 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.711083 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.711409 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.713992 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory" (OuterVolumeSpecName: "inventory") pod "e2078d83-8d53-4052-8b77-031948bc8705" (UID: "e2078d83-8d53-4052-8b77-031948bc8705"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743693 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gqqj\" (UniqueName: \"kubernetes.io/projected/e2078d83-8d53-4052-8b77-031948bc8705-kube-api-access-4gqqj\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743746 4661 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743770 4661 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743790 4661 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743810 4661 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743833 4661 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.743855 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2078d83-8d53-4052-8b77-031948bc8705-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:18:27 crc kubenswrapper[4661]: I1001 06:18:27.757033 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:18:27 crc kubenswrapper[4661]: E1001 06:18:27.757514 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:18:28 crc kubenswrapper[4661]: I1001 06:18:28.107381 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" event={"ID":"e2078d83-8d53-4052-8b77-031948bc8705","Type":"ContainerDied","Data":"0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152"} Oct 01 06:18:28 crc kubenswrapper[4661]: I1001 06:18:28.107443 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d687289c3a32abb10416dbe1646efc4e48515efad40134793c3ab1699696152" Oct 01 06:18:28 crc kubenswrapper[4661]: I1001 06:18:28.107473 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs" Oct 01 06:18:39 crc kubenswrapper[4661]: I1001 06:18:39.757839 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:18:39 crc kubenswrapper[4661]: E1001 06:18:39.759958 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:18:50 crc kubenswrapper[4661]: I1001 06:18:50.757520 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:18:50 crc kubenswrapper[4661]: E1001 06:18:50.759008 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:19:05 crc kubenswrapper[4661]: I1001 06:19:05.757749 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:19:05 crc kubenswrapper[4661]: E1001 06:19:05.758894 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.095257 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.096048 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="prometheus" containerID="cri-o://909afc94c98bc7d4a92f5cc7e00b6782ab53d5ed51a49b88f48b2132905022c2" gracePeriod=600 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.096404 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="config-reloader" containerID="cri-o://c29be373f937e2652d05be92770fabdb937292978e266b1bb7e6206c7359ede2" gracePeriod=600 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.096426 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="thanos-sidecar" containerID="cri-o://81134aefa5f7c53c22ec5c1fa244c34652df840a596107c5f3b37806c3cb6896" gracePeriod=600 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643527 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dde6251-f26b-4291-931e-30fce08578fd" 
containerID="81134aefa5f7c53c22ec5c1fa244c34652df840a596107c5f3b37806c3cb6896" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643913 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dde6251-f26b-4291-931e-30fce08578fd" containerID="c29be373f937e2652d05be92770fabdb937292978e266b1bb7e6206c7359ede2" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643924 4661 generic.go:334] "Generic (PLEG): container finished" podID="2dde6251-f26b-4291-931e-30fce08578fd" containerID="909afc94c98bc7d4a92f5cc7e00b6782ab53d5ed51a49b88f48b2132905022c2" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643944 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerDied","Data":"81134aefa5f7c53c22ec5c1fa244c34652df840a596107c5f3b37806c3cb6896"} Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643968 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerDied","Data":"c29be373f937e2652d05be92770fabdb937292978e266b1bb7e6206c7359ede2"} Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.643980 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerDied","Data":"909afc94c98bc7d4a92f5cc7e00b6782ab53d5ed51a49b88f48b2132905022c2"} Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.778462 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959122 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959182 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959263 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959307 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959343 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config\") pod 
\"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959490 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959553 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959602 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959661 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959721 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbtdv\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.959775 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle\") pod \"2dde6251-f26b-4291-931e-30fce08578fd\" (UID: \"2dde6251-f26b-4291-931e-30fce08578fd\") " Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.961819 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.966210 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "tls-assets". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.967346 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out" (OuterVolumeSpecName: "config-out") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.968221 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.968356 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv" (OuterVolumeSpecName: "kube-api-access-hbtdv") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "kube-api-access-hbtdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.968443 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config" (OuterVolumeSpecName: "config") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.969109 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.970767 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:07 crc kubenswrapper[4661]: I1001 06:19:07.970959 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "thanos-prometheus-http-client-file". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.000922 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "pvc-70227665-af75-4dfe-9648-95f31cf3d818". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062034 4661 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2dde6251-f26b-4291-931e-30fce08578fd-config-out\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062079 4661 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062095 4661 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-tls-assets\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062110 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbtdv\" (UniqueName: \"kubernetes.io/projected/2dde6251-f26b-4291-931e-30fce08578fd-kube-api-access-hbtdv\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062121 4661 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062130 4661 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062141 4661 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2dde6251-f26b-4291-931e-30fce08578fd-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062152 4661 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062163 4661 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.062204 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") on node \"crc\" " Oct 01 
06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.076713 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config" (OuterVolumeSpecName: "web-config") pod "2dde6251-f26b-4291-931e-30fce08578fd" (UID: "2dde6251-f26b-4291-931e-30fce08578fd"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.103496 4661 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.103659 4661 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-70227665-af75-4dfe-9648-95f31cf3d818" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818") on node "crc" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.163646 4661 reconciler_common.go:293] "Volume detached for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.163680 4661 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2dde6251-f26b-4291-931e-30fce08578fd-web-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.659980 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2dde6251-f26b-4291-931e-30fce08578fd","Type":"ContainerDied","Data":"10d6b719d61b5ab549f31fbb616c89cb907d163fcb8468afc01c25bdc17016d8"} Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.660048 4661 scope.go:117] "RemoveContainer" containerID="81134aefa5f7c53c22ec5c1fa244c34652df840a596107c5f3b37806c3cb6896" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.660070 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.693252 4661 scope.go:117] "RemoveContainer" containerID="c29be373f937e2652d05be92770fabdb937292978e266b1bb7e6206c7359ede2" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.707514 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.719742 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.732150 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.747206 4661 scope.go:117] "RemoveContainer" containerID="909afc94c98bc7d4a92f5cc7e00b6782ab53d5ed51a49b88f48b2132905022c2" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752220 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="prometheus" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752248 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="prometheus" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752274 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752280 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752300 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="init-config-reloader" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752311 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="init-config-reloader" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752343 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="extract-utilities" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752349 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="extract-utilities" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752374 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="config-reloader" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752382 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="config-reloader" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752400 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="extract-content" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752406 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="extract-content" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752418 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 
06:19:08.752424 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752436 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="thanos-sidecar" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752442 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="thanos-sidecar" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752461 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="extract-content" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752467 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="extract-content" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752485 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="extract-utilities" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752491 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="extract-utilities" Oct 01 06:19:08 crc kubenswrapper[4661]: E1001 06:19:08.752510 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2078d83-8d53-4052-8b77-031948bc8705" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.752517 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2078d83-8d53-4052-8b77-031948bc8705" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756037 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ee01ad9-84bb-4ec7-a002-d04e59632e7c" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756082 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="05da173d-e38a-42e7-8dfd-0b7acb56f065" containerName="registry-server" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756108 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2078d83-8d53-4052-8b77-031948bc8705" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756137 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="config-reloader" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756147 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="prometheus" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.756162 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dde6251-f26b-4291-931e-30fce08578fd" containerName="thanos-sidecar" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.775787 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.785216 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.791568 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.791598 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.791857 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rxwq4" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.791973 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.792036 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.796503 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.798440 4661 scope.go:117] "RemoveContainer" containerID="0306c5f0e811f1e2b0cf05468bc2bd80e46ab8aa58a9063b21bb3aae9898a509" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.882809 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.882970 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883004 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883130 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8df66a7a-5bab-428f-b415-0e46c952f4fb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883748 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-config\") pod \"prometheus-metric-storage-0\" (UID: 
\"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883800 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nbgw\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-kube-api-access-9nbgw\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883860 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8df66a7a-5bab-428f-b415-0e46c952f4fb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883899 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.883965 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.884011 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.884061 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.985595 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8df66a7a-5bab-428f-b415-0e46c952f4fb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.986615 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-config\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0" Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.986981 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nbgw\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-kube-api-access-9nbgw\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987005 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987040 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8df66a7a-5bab-428f-b415-0e46c952f4fb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987065 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987083 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987125 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987153 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987258 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.987281 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.988442 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8df66a7a-5bab-428f-b415-0e46c952f4fb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.992443 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8df66a7a-5bab-428f-b415-0e46c952f4fb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.993306 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.993380 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.994429 4661 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.994529 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e51bf99238560523215dee685b077c67fdd0498f27e19b8c5ba6a080034e1ca7/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:08 crc kubenswrapper[4661]: I1001 06:19:08.996701 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-config\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.002704 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.003018 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.003591 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.006475 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/8df66a7a-5bab-428f-b415-0e46c952f4fb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.009436 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nbgw\" (UniqueName: \"kubernetes.io/projected/8df66a7a-5bab-428f-b415-0e46c952f4fb-kube-api-access-9nbgw\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.050954 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70227665-af75-4dfe-9648-95f31cf3d818\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70227665-af75-4dfe-9648-95f31cf3d818\") pod \"prometheus-metric-storage-0\" (UID: \"8df66a7a-5bab-428f-b415-0e46c952f4fb\") " pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.116603 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:09 crc kubenswrapper[4661]: W1001 06:19:09.598583 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8df66a7a_5bab_428f_b415_0e46c952f4fb.slice/crio-1fcfe6084432e9429d85ea5635f34a0b13688beee23cf60a272f0de199b27301 WatchSource:0}: Error finding container 1fcfe6084432e9429d85ea5635f34a0b13688beee23cf60a272f0de199b27301: Status 404 returned error can't find the container with id 1fcfe6084432e9429d85ea5635f34a0b13688beee23cf60a272f0de199b27301
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.601164 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.669168 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerStarted","Data":"1fcfe6084432e9429d85ea5635f34a0b13688beee23cf60a272f0de199b27301"}
Oct 01 06:19:09 crc kubenswrapper[4661]: I1001 06:19:09.769505 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dde6251-f26b-4291-931e-30fce08578fd" path="/var/lib/kubelet/pods/2dde6251-f26b-4291-931e-30fce08578fd/volumes"
Oct 01 06:19:13 crc kubenswrapper[4661]: I1001 06:19:13.717721 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerStarted","Data":"a80a21ae866bf2869f818cbfeb41ded81c3e326706eae9ae6f5b64d3138e29ff"}
Oct 01 06:19:20 crc kubenswrapper[4661]: I1001 06:19:20.757509 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"
Oct 01 06:19:20 crc kubenswrapper[4661]: E1001 06:19:20.758322 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:19:22 crc kubenswrapper[4661]: I1001 06:19:22.847314 4661 generic.go:334] "Generic (PLEG): container finished" podID="8df66a7a-5bab-428f-b415-0e46c952f4fb" containerID="a80a21ae866bf2869f818cbfeb41ded81c3e326706eae9ae6f5b64d3138e29ff" exitCode=0
Oct 01 06:19:22 crc kubenswrapper[4661]: I1001 06:19:22.847374 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerDied","Data":"a80a21ae866bf2869f818cbfeb41ded81c3e326706eae9ae6f5b64d3138e29ff"}
Oct 01 06:19:23 crc kubenswrapper[4661]: I1001 06:19:23.864696 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerStarted","Data":"12fcc3bc342b55648d56fb71c88dcf3bbfbcebb419fe82c5d00b5862389434d4"}
Oct 01 06:19:27 crc kubenswrapper[4661]: I1001 06:19:27.917202 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerStarted","Data":"31b4af5480e61239688fe28a769fa751ed92b0f6ee4486d34e266e8119288817"}
Oct 01 06:19:28 crc kubenswrapper[4661]: I1001 06:19:28.930210 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8df66a7a-5bab-428f-b415-0e46c952f4fb","Type":"ContainerStarted","Data":"d9bc7babbf65f609b852e23ad3b0d019d0b7182659a0e66f4afc08e244599f31"}
Oct 01 06:19:28 crc kubenswrapper[4661]: I1001 06:19:28.974020 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=20.973997525 podStartE2EDuration="20.973997525s" podCreationTimestamp="2025-10-01 06:19:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:28.96205945 +0000 UTC m=+3017.900038074" watchObservedRunningTime="2025-10-01 06:19:28.973997525 +0000 UTC m=+3017.911976149"
Oct 01 06:19:29 crc kubenswrapper[4661]: I1001 06:19:29.117410 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:35 crc kubenswrapper[4661]: I1001 06:19:35.759191 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"
Oct 01 06:19:35 crc kubenswrapper[4661]: E1001 06:19:35.760299 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:19:39 crc kubenswrapper[4661]: I1001 06:19:39.117212 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:39 crc kubenswrapper[4661]: I1001 06:19:39.128170 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:40 crc kubenswrapper[4661]: I1001 06:19:40.090836 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Oct 01 06:19:47 crc kubenswrapper[4661]: I1001 06:19:47.757959 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"
Oct 01 06:19:47 crc kubenswrapper[4661]: E1001 06:19:47.759308 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.021679 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.024046 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.025980 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.026315 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.026318 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.026894 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vzff5"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.035744 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092440 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092523 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092573 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092609 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgw65\" (UniqueName: \"kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092682 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.092704 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest"
\"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.093258 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.093418 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195063 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195152 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195240 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195275 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195319 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195351 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgw65\" (UniqueName: \"kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195405 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195429 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195491 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.195812 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.197030 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.197588 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.198545 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.201376 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.203624 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.203775 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " 
pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.207307 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.218282 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgw65\" (UniqueName: \"kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.233503 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.353737 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 06:19:49 crc kubenswrapper[4661]: I1001 06:19:49.852044 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 01 06:19:49 crc kubenswrapper[4661]: W1001 06:19:49.858147 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff1ca911_a470_4bfb_8cc2_3f76257eed1f.slice/crio-6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b WatchSource:0}: Error finding container 6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b: Status 404 returned error can't find the container with id 6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b Oct 01 06:19:50 crc kubenswrapper[4661]: I1001 06:19:50.205858 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ff1ca911-a470-4bfb-8cc2-3f76257eed1f","Type":"ContainerStarted","Data":"6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b"} Oct 01 06:20:01 crc kubenswrapper[4661]: I1001 06:20:01.764939 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:20:01 crc kubenswrapper[4661]: E1001 06:20:01.765579 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:20:07 crc kubenswrapper[4661]: E1001 06:20:07.193587 4661 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-tempest-all:current" Oct 01 06:20:07 crc kubenswrapper[4661]: E1001 06:20:07.195477 4661 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-tempest-all:current" Oct 01 
06:20:07 crc kubenswrapper[4661]: E1001 06:20:07.195851 4661 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-master-centos10/openstack-tempest-all:current,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pgw65,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(ff1ca911-a470-4bfb-8cc2-3f76257eed1f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 06:20:07 crc kubenswrapper[4661]: E1001 06:20:07.197112 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled 
desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" Oct 01 06:20:07 crc kubenswrapper[4661]: E1001 06:20:07.390181 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-tempest-all:current\\\"\"" pod="openstack/tempest-tests-tempest" podUID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" Oct 01 06:20:15 crc kubenswrapper[4661]: I1001 06:20:15.757058 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:20:15 crc kubenswrapper[4661]: E1001 06:20:15.760174 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:20:22 crc kubenswrapper[4661]: I1001 06:20:22.550144 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 01 06:20:24 crc kubenswrapper[4661]: I1001 06:20:24.601056 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ff1ca911-a470-4bfb-8cc2-3f76257eed1f","Type":"ContainerStarted","Data":"0d5204703699537944e4365a5d393794783bdae86fcc798530908f76cf9e56af"} Oct 01 06:20:24 crc kubenswrapper[4661]: I1001 06:20:24.630561 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.944233082 podStartE2EDuration="37.630539835s" podCreationTimestamp="2025-10-01 06:19:47 +0000 UTC" firstStartedPulling="2025-10-01 06:19:49.859997412 +0000 UTC m=+3038.797976026" lastFinishedPulling="2025-10-01 06:20:22.546304135 +0000 UTC m=+3071.484282779" observedRunningTime="2025-10-01 06:20:24.623184255 +0000 UTC m=+3073.561162909" watchObservedRunningTime="2025-10-01 06:20:24.630539835 +0000 UTC m=+3073.568518449" Oct 01 06:20:27 crc kubenswrapper[4661]: I1001 06:20:27.758211 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:20:27 crc kubenswrapper[4661]: E1001 06:20:27.759075 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:20:40 crc kubenswrapper[4661]: I1001 06:20:40.756558 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:20:40 crc kubenswrapper[4661]: E1001 06:20:40.757472 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:20:51 crc kubenswrapper[4661]: I1001 06:20:51.762794 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:20:51 crc kubenswrapper[4661]: E1001 06:20:51.763619 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:21:06 crc kubenswrapper[4661]: I1001 06:21:06.757887 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:21:06 crc kubenswrapper[4661]: E1001 06:21:06.759013 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:21:17 crc kubenswrapper[4661]: I1001 06:21:17.757830 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:21:17 crc kubenswrapper[4661]: E1001 06:21:17.759083 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:21:31 crc kubenswrapper[4661]: I1001 06:21:31.763169 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:21:31 crc kubenswrapper[4661]: E1001 06:21:31.763929 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:21:42 crc kubenswrapper[4661]: I1001 06:21:42.757622 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:21:42 crc kubenswrapper[4661]: E1001 06:21:42.758678 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:21:54 crc kubenswrapper[4661]: I1001 06:21:54.757880 4661 
scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:21:54 crc kubenswrapper[4661]: E1001 06:21:54.758979 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:22:05 crc kubenswrapper[4661]: I1001 06:22:05.758180 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:22:05 crc kubenswrapper[4661]: E1001 06:22:05.759436 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:22:16 crc kubenswrapper[4661]: I1001 06:22:16.757008 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:22:16 crc kubenswrapper[4661]: E1001 06:22:16.757962 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:22:30 crc kubenswrapper[4661]: I1001 06:22:30.757553 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:22:30 crc kubenswrapper[4661]: E1001 06:22:30.758700 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:22:44 crc kubenswrapper[4661]: I1001 06:22:44.757783 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971" Oct 01 06:22:45 crc kubenswrapper[4661]: I1001 06:22:45.219418 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e"} Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.589714 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kvcg2"] Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.593616 4661 util.go:30] "No sandbox for pod can be found. 
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.593616 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.605444 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvcg2"]
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.715586 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgfbw\" (UniqueName: \"kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.716618 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.716737 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.818828 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.819030 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgfbw\" (UniqueName: \"kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.819096 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.819430 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.819571 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.842715 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgfbw\" (UniqueName: \"kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw\") pod \"community-operators-kvcg2\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") " pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:49 crc kubenswrapper[4661]: I1001 06:24:49.915334 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:50 crc kubenswrapper[4661]: I1001 06:24:50.653587 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvcg2"]
Oct 01 06:24:50 crc kubenswrapper[4661]: I1001 06:24:50.788565 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerStarted","Data":"05a11f81334ea1db3f2db9375aa3fa765e87aca791d94becb544f4e9cd90bbee"}
Oct 01 06:24:51 crc kubenswrapper[4661]: I1001 06:24:51.814833 4661 generic.go:334] "Generic (PLEG): container finished" podID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerID="538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201" exitCode=0
Oct 01 06:24:51 crc kubenswrapper[4661]: I1001 06:24:51.814934 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerDied","Data":"538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201"}
Oct 01 06:24:51 crc kubenswrapper[4661]: I1001 06:24:51.818489 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 01 06:24:52 crc kubenswrapper[4661]: I1001 06:24:52.826445 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerStarted","Data":"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"}
Oct 01 06:24:53 crc kubenswrapper[4661]: I1001 06:24:53.843829 4661 generic.go:334] "Generic (PLEG): container finished" podID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerID="6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269" exitCode=0
Oct 01 06:24:53 crc kubenswrapper[4661]: I1001 06:24:53.843980 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerDied","Data":"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"}
Oct 01 06:24:54 crc kubenswrapper[4661]: I1001 06:24:54.856940 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerStarted","Data":"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"}
Oct 01 06:24:54 crc kubenswrapper[4661]: I1001 06:24:54.877433 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kvcg2" podStartSLOduration=3.360212973 podStartE2EDuration="5.877413349s" podCreationTimestamp="2025-10-01 06:24:49 +0000 UTC" firstStartedPulling="2025-10-01 06:24:51.818280388 +0000 UTC m=+3340.756259002" lastFinishedPulling="2025-10-01 06:24:54.335480744 +0000 UTC m=+3343.273459378" observedRunningTime="2025-10-01 06:24:54.87668577 +0000 UTC m=+3343.814664424" watchObservedRunningTime="2025-10-01 06:24:54.877413349 +0000 UTC m=+3343.815391963"
Oct 01 06:24:59 crc kubenswrapper[4661]: I1001 06:24:59.916047 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:59 crc kubenswrapper[4661]: I1001 06:24:59.918389 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:24:59 crc kubenswrapper[4661]: I1001 06:24:59.996667 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:25:01 crc kubenswrapper[4661]: I1001 06:25:01.026116 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:25:01 crc kubenswrapper[4661]: I1001 06:25:01.099919 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvcg2"]
Oct 01 06:25:02 crc kubenswrapper[4661]: I1001 06:25:02.954304 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kvcg2" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="registry-server" containerID="cri-o://b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293" gracePeriod=2
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.498712 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.602000 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content\") pod \"3f674990-47a4-4a6f-bab8-44dd669cee1f\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") "
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.602236 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities\") pod \"3f674990-47a4-4a6f-bab8-44dd669cee1f\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") "
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.602298 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgfbw\" (UniqueName: \"kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw\") pod \"3f674990-47a4-4a6f-bab8-44dd669cee1f\" (UID: \"3f674990-47a4-4a6f-bab8-44dd669cee1f\") "
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.603186 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities" (OuterVolumeSpecName: "utilities") pod "3f674990-47a4-4a6f-bab8-44dd669cee1f" (UID: "3f674990-47a4-4a6f-bab8-44dd669cee1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.609511 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw" (OuterVolumeSpecName: "kube-api-access-xgfbw") pod "3f674990-47a4-4a6f-bab8-44dd669cee1f" (UID: "3f674990-47a4-4a6f-bab8-44dd669cee1f"). InnerVolumeSpecName "kube-api-access-xgfbw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.668453 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f674990-47a4-4a6f-bab8-44dd669cee1f" (UID: "3f674990-47a4-4a6f-bab8-44dd669cee1f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.704416 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.704452 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f674990-47a4-4a6f-bab8-44dd669cee1f-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.704463 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgfbw\" (UniqueName: \"kubernetes.io/projected/3f674990-47a4-4a6f-bab8-44dd669cee1f-kube-api-access-xgfbw\") on node \"crc\" DevicePath \"\""
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.965283 4661 generic.go:334] "Generic (PLEG): container finished" podID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerID="b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293" exitCode=0
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.965334 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerDied","Data":"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"}
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.965359 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvcg2" event={"ID":"3f674990-47a4-4a6f-bab8-44dd669cee1f","Type":"ContainerDied","Data":"05a11f81334ea1db3f2db9375aa3fa765e87aca791d94becb544f4e9cd90bbee"}
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.965366 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvcg2"
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.965375 4661 scope.go:117] "RemoveContainer" containerID="b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.988218 4661 scope.go:117] "RemoveContainer" containerID="6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"
Oct 01 06:25:03 crc kubenswrapper[4661]: I1001 06:25:03.997881 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvcg2"]
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.005765 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kvcg2"]
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.017118 4661 scope.go:117] "RemoveContainer" containerID="538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.079020 4661 scope.go:117] "RemoveContainer" containerID="b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"
Oct 01 06:25:04 crc kubenswrapper[4661]: E1001 06:25:04.079569 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293\": container with ID starting with b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293 not found: ID does not exist" containerID="b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.079707 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293"} err="failed to get container status \"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293\": rpc error: code = NotFound desc = could not find container \"b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293\": container with ID starting with b08c7ae1eb62793d8aae82dab875e4693bee5a500372a0eb9546c9285db60293 not found: ID does not exist"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.079762 4661 scope.go:117] "RemoveContainer" containerID="6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"
Oct 01 06:25:04 crc kubenswrapper[4661]: E1001 06:25:04.080508 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269\": container with ID starting with 6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269 not found: ID does not exist" containerID="6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.080608 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269"} err="failed to get container status \"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269\": rpc error: code = NotFound desc = could not find container \"6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269\": container with ID starting with 6f108cba5e5d2fa7385138d2743e6342cbe4e6ed492d830866c38b400a488269 not found: ID does not exist"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.080699 4661 scope.go:117] "RemoveContainer" containerID="538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201"
Oct 01 06:25:04 crc kubenswrapper[4661]: E1001 06:25:04.081268 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201\": container with ID starting with 538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201 not found: ID does not exist" containerID="538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.081330 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201"} err="failed to get container status \"538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201\": rpc error: code = NotFound desc = could not find container \"538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201\": container with ID starting with 538008828ea9a6d8ab2a4f2076080c308b705a4504f9ace3e9d792262f8f6201 not found: ID does not exist"
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.309314 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:25:04 crc kubenswrapper[4661]: I1001 06:25:04.309375 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:25:05 crc kubenswrapper[4661]: I1001 06:25:05.772731 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" path="/var/lib/kubelet/pods/3f674990-47a4-4a6f-bab8-44dd669cee1f/volumes"
Oct 01 06:25:34 crc kubenswrapper[4661]: I1001 06:25:34.309200 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:25:34 crc kubenswrapper[4661]: I1001 06:25:34.310021 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.309535 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.310467 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.310559 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.312076 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.312221 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e" gracePeriod=600
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.738725 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e" exitCode=0
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.738953 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e"}
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.739162 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"}
Oct 01 06:26:04 crc kubenswrapper[4661]: I1001 06:26:04.739197 4661 scope.go:117] "RemoveContainer" containerID="7406515fd531ab415ed30b376b092dc587e70616e8fbebb9d0598d375fa37971"
Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.740129 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"]
Oct 01 06:26:36 crc kubenswrapper[4661]: E1001 06:26:36.741397 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="extract-content"
Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.741414 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="extract-content"
Oct 01 06:26:36 crc kubenswrapper[4661]: E1001 06:26:36.741436 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="registry-server"
Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.741445 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="registry-server"
Oct 01 06:26:36 crc kubenswrapper[4661]: E1001 06:26:36.741467 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="extract-utilities"
Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.741476 4661 state_mem.go:107] "Deleted CPUSet assignment"
podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="extract-utilities" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.741740 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f674990-47a4-4a6f-bab8-44dd669cee1f" containerName="registry-server" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.743376 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.763664 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"] Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.806628 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pghnl\" (UniqueName: \"kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.806712 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.806772 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.909764 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pghnl\" (UniqueName: \"kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.909839 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.909898 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.910608 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.910711 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:36 crc kubenswrapper[4661]: I1001 06:26:36.937645 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pghnl\" (UniqueName: \"kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl\") pod \"redhat-operators-x4fqt\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:37 crc kubenswrapper[4661]: I1001 06:26:37.086968 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:37 crc kubenswrapper[4661]: I1001 06:26:37.570051 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"] Oct 01 06:26:38 crc kubenswrapper[4661]: I1001 06:26:38.162855 4661 generic.go:334] "Generic (PLEG): container finished" podID="956de59f-5207-415b-af89-2a0ea2bf434e" containerID="88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9" exitCode=0 Oct 01 06:26:38 crc kubenswrapper[4661]: I1001 06:26:38.162936 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerDied","Data":"88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9"} Oct 01 06:26:38 crc kubenswrapper[4661]: I1001 06:26:38.163020 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerStarted","Data":"17e30c50292a48ee8e3d7a9830abc0325ab2855a542c7741b5d298e1eb1fe8ef"} Oct 01 06:26:40 crc kubenswrapper[4661]: I1001 06:26:40.199266 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerStarted","Data":"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"} Oct 01 06:26:43 crc kubenswrapper[4661]: I1001 06:26:43.236208 4661 generic.go:334] "Generic (PLEG): container finished" podID="956de59f-5207-415b-af89-2a0ea2bf434e" containerID="ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a" exitCode=0 Oct 01 06:26:43 crc kubenswrapper[4661]: I1001 06:26:43.236318 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerDied","Data":"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"} Oct 01 06:26:44 crc kubenswrapper[4661]: I1001 06:26:44.248301 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerStarted","Data":"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"} Oct 01 06:26:44 crc kubenswrapper[4661]: I1001 06:26:44.284877 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x4fqt" podStartSLOduration=2.78788066 podStartE2EDuration="8.284846418s" podCreationTimestamp="2025-10-01 06:26:36 +0000 UTC" firstStartedPulling="2025-10-01 06:26:38.166745349 +0000 UTC m=+3447.104723993" lastFinishedPulling="2025-10-01 06:26:43.663711097 
+0000 UTC m=+3452.601689751" observedRunningTime="2025-10-01 06:26:44.2765028 +0000 UTC m=+3453.214481424" watchObservedRunningTime="2025-10-01 06:26:44.284846418 +0000 UTC m=+3453.222825042" Oct 01 06:26:47 crc kubenswrapper[4661]: I1001 06:26:47.087966 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:47 crc kubenswrapper[4661]: I1001 06:26:47.088779 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:48 crc kubenswrapper[4661]: I1001 06:26:48.166407 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x4fqt" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="registry-server" probeResult="failure" output=< Oct 01 06:26:48 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 06:26:48 crc kubenswrapper[4661]: > Oct 01 06:26:57 crc kubenswrapper[4661]: I1001 06:26:57.132557 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:57 crc kubenswrapper[4661]: I1001 06:26:57.217587 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:57 crc kubenswrapper[4661]: I1001 06:26:57.388194 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"] Oct 01 06:26:58 crc kubenswrapper[4661]: I1001 06:26:58.411203 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x4fqt" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="registry-server" containerID="cri-o://df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17" gracePeriod=2 Oct 01 06:26:58 crc kubenswrapper[4661]: I1001 06:26:58.963008 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4fqt" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.008595 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content\") pod \"956de59f-5207-415b-af89-2a0ea2bf434e\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.008797 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities\") pod \"956de59f-5207-415b-af89-2a0ea2bf434e\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.008908 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pghnl\" (UniqueName: \"kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl\") pod \"956de59f-5207-415b-af89-2a0ea2bf434e\" (UID: \"956de59f-5207-415b-af89-2a0ea2bf434e\") " Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.010959 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities" (OuterVolumeSpecName: "utilities") pod "956de59f-5207-415b-af89-2a0ea2bf434e" (UID: "956de59f-5207-415b-af89-2a0ea2bf434e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.024985 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl" (OuterVolumeSpecName: "kube-api-access-pghnl") pod "956de59f-5207-415b-af89-2a0ea2bf434e" (UID: "956de59f-5207-415b-af89-2a0ea2bf434e"). InnerVolumeSpecName "kube-api-access-pghnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.093810 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "956de59f-5207-415b-af89-2a0ea2bf434e" (UID: "956de59f-5207-415b-af89-2a0ea2bf434e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.111458 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.111624 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pghnl\" (UniqueName: \"kubernetes.io/projected/956de59f-5207-415b-af89-2a0ea2bf434e-kube-api-access-pghnl\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.111714 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/956de59f-5207-415b-af89-2a0ea2bf434e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.424350 4661 generic.go:334] "Generic (PLEG): container finished" podID="956de59f-5207-415b-af89-2a0ea2bf434e" containerID="df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17" exitCode=0 Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.424397 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerDied","Data":"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"} Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.424406 4661 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.424426 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4fqt" event={"ID":"956de59f-5207-415b-af89-2a0ea2bf434e","Type":"ContainerDied","Data":"17e30c50292a48ee8e3d7a9830abc0325ab2855a542c7741b5d298e1eb1fe8ef"}
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.424448 4661 scope.go:117] "RemoveContainer" containerID="df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.463587 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"]
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.478523 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x4fqt"]
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.482743 4661 scope.go:117] "RemoveContainer" containerID="ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.512282 4661 scope.go:117] "RemoveContainer" containerID="88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.567009 4661 scope.go:117] "RemoveContainer" containerID="df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"
Oct 01 06:26:59 crc kubenswrapper[4661]: E1001 06:26:59.568024 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17\": container with ID starting with df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17 not found: ID does not exist" containerID="df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.568071 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17"} err="failed to get container status \"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17\": rpc error: code = NotFound desc = could not find container \"df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17\": container with ID starting with df20f521b04d536a1b0fcc02312f941bb7d5b15793e69c9e18379e53812a4e17 not found: ID does not exist"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.568105 4661 scope.go:117] "RemoveContainer" containerID="ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"
Oct 01 06:26:59 crc kubenswrapper[4661]: E1001 06:26:59.568494 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a\": container with ID starting with ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a not found: ID does not exist" containerID="ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.568528 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a"} err="failed to get container status \"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a\": rpc error: code = NotFound desc = could not find container \"ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a\": container with ID starting with ee4ead8ec0ce9a11aa2207bbb1f2931299818f37b3f95b9455d1da60da30560a not found: ID does not exist"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.568551 4661 scope.go:117] "RemoveContainer" containerID="88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9"
Oct 01 06:26:59 crc kubenswrapper[4661]: E1001 06:26:59.568783 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9\": container with ID starting with 88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9 not found: ID does not exist" containerID="88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.568806 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9"} err="failed to get container status \"88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9\": rpc error: code = NotFound desc = could not find container \"88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9\": container with ID starting with 88a11491315f2ccd288215554c6ecaba194faad7de62dcce30eb9fd4ec783eb9 not found: ID does not exist"
Oct 01 06:26:59 crc kubenswrapper[4661]: I1001 06:26:59.778272 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" path="/var/lib/kubelet/pods/956de59f-5207-415b-af89-2a0ea2bf434e/volumes"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.452511 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:19 crc kubenswrapper[4661]: E1001 06:27:19.453734 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="extract-content"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.453750 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="extract-content"
Oct 01 06:27:19 crc kubenswrapper[4661]: E1001 06:27:19.453777 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="registry-server"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.453785 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="registry-server"
Oct 01 06:27:19 crc kubenswrapper[4661]: E1001 06:27:19.453810 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="extract-utilities"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.453819 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="extract-utilities"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.454051 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="956de59f-5207-415b-af89-2a0ea2bf434e" containerName="registry-server"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.455946 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.476772 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.587935 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzdn9\" (UniqueName: \"kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.588065 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.588165 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.691055 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.691186 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzdn9\" (UniqueName: \"kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.691264 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.691678 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.691712 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.720514 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzdn9\" (UniqueName: \"kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9\") pod \"redhat-marketplace-q5xhq\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") " pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:19 crc kubenswrapper[4661]: I1001 06:27:19.784471 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:20 crc kubenswrapper[4661]: I1001 06:27:20.278492 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:20 crc kubenswrapper[4661]: I1001 06:27:20.678280 4661 generic.go:334] "Generic (PLEG): container finished" podID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerID="2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3" exitCode=0
Oct 01 06:27:20 crc kubenswrapper[4661]: I1001 06:27:20.678333 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerDied","Data":"2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3"}
Oct 01 06:27:20 crc kubenswrapper[4661]: I1001 06:27:20.678372 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerStarted","Data":"5d031123c8a36cb2e018a773738d72aed568ac67ccedbba455639f8670b90a4c"}
Oct 01 06:27:22 crc kubenswrapper[4661]: I1001 06:27:22.703306 4661 generic.go:334] "Generic (PLEG): container finished" podID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerID="67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334" exitCode=0
Oct 01 06:27:22 crc kubenswrapper[4661]: I1001 06:27:22.703400 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerDied","Data":"67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334"}
Oct 01 06:27:23 crc kubenswrapper[4661]: I1001 06:27:23.717899 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerStarted","Data":"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"}
Oct 01 06:27:23 crc kubenswrapper[4661]: I1001 06:27:23.744852 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q5xhq" podStartSLOduration=2.234987154 podStartE2EDuration="4.744826392s" podCreationTimestamp="2025-10-01 06:27:19 +0000 UTC" firstStartedPulling="2025-10-01 06:27:20.68051444 +0000 UTC m=+3489.618493064" lastFinishedPulling="2025-10-01 06:27:23.190353678 +0000 UTC m=+3492.128332302" observedRunningTime="2025-10-01 06:27:23.737480031 +0000 UTC m=+3492.675458655" watchObservedRunningTime="2025-10-01 06:27:23.744826392 +0000 UTC m=+3492.682805016"
Oct 01 06:27:29 crc kubenswrapper[4661]: I1001 06:27:29.784898 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:29 crc kubenswrapper[4661]: I1001 06:27:29.785797 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:29 crc kubenswrapper[4661]: I1001 06:27:29.859599 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:29 crc kubenswrapper[4661]: I1001 06:27:29.929120 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:30 crc kubenswrapper[4661]: I1001 06:27:30.108930 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:31 crc kubenswrapper[4661]: I1001 06:27:31.820184 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q5xhq" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="registry-server" containerID="cri-o://cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70" gracePeriod=2
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.314427 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.401331 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content\") pod \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") "
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.401370 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities\") pod \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") "
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.401410 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzdn9\" (UniqueName: \"kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9\") pod \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\" (UID: \"3edc6f1a-02ea-46db-9d48-abb0d8d35927\") "
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.403342 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities" (OuterVolumeSpecName: "utilities") pod "3edc6f1a-02ea-46db-9d48-abb0d8d35927" (UID: "3edc6f1a-02ea-46db-9d48-abb0d8d35927"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.411556 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9" (OuterVolumeSpecName: "kube-api-access-nzdn9") pod "3edc6f1a-02ea-46db-9d48-abb0d8d35927" (UID: "3edc6f1a-02ea-46db-9d48-abb0d8d35927"). InnerVolumeSpecName "kube-api-access-nzdn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.415744 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3edc6f1a-02ea-46db-9d48-abb0d8d35927" (UID: "3edc6f1a-02ea-46db-9d48-abb0d8d35927"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.503727 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.504020 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3edc6f1a-02ea-46db-9d48-abb0d8d35927-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.504105 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzdn9\" (UniqueName: \"kubernetes.io/projected/3edc6f1a-02ea-46db-9d48-abb0d8d35927-kube-api-access-nzdn9\") on node \"crc\" DevicePath \"\""
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.835810 4661 generic.go:334] "Generic (PLEG): container finished" podID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerID="cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70" exitCode=0
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.835925 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5xhq"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.835879 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerDied","Data":"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"}
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.836096 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5xhq" event={"ID":"3edc6f1a-02ea-46db-9d48-abb0d8d35927","Type":"ContainerDied","Data":"5d031123c8a36cb2e018a773738d72aed568ac67ccedbba455639f8670b90a4c"}
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.836143 4661 scope.go:117] "RemoveContainer" containerID="cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.868795 4661 scope.go:117] "RemoveContainer" containerID="67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.896036 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.907393 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5xhq"]
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.907457 4661 scope.go:117] "RemoveContainer" containerID="2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.969665 4661 scope.go:117] "RemoveContainer" containerID="cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"
Oct 01 06:27:32 crc kubenswrapper[4661]: E1001 06:27:32.970178 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70\": container with ID starting with cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70 not found: ID does not exist" containerID="cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.970239 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70"} err="failed to get container status \"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70\": rpc error: code = NotFound desc = could not find container \"cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70\": container with ID starting with cc7e4698b1d1b9ff98efe30156dc53fb0e1e2c74057b7a55eab500e21d509c70 not found: ID does not exist"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.970279 4661 scope.go:117] "RemoveContainer" containerID="67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334"
Oct 01 06:27:32 crc kubenswrapper[4661]: E1001 06:27:32.970735 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334\": container with ID starting with 67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334 not found: ID does not exist" containerID="67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.970815 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334"} err="failed to get container status \"67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334\": rpc error: code = NotFound desc = could not find container \"67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334\": container with ID starting with 67c4c1f5385d4cbf00211718319de586d46e80574721f6618144be64efbcc334 not found: ID does not exist"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.970865 4661 scope.go:117] "RemoveContainer" containerID="2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3"
Oct 01 06:27:32 crc kubenswrapper[4661]: E1001 06:27:32.971426 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3\": container with ID starting with 2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3 not found: ID does not exist" containerID="2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3"
Oct 01 06:27:32 crc kubenswrapper[4661]: I1001 06:27:32.971488 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3"} err="failed to get container status \"2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3\": rpc error: code = NotFound desc = could not find container \"2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3\": container with ID starting with 2661577c05a42071cc140cb5bd14f3def6a6beaf3316b7848ab455c03411c7e3 not found: ID does not exist"
Oct 01 06:27:33 crc kubenswrapper[4661]: I1001 06:27:33.776260 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" path="/var/lib/kubelet/pods/3edc6f1a-02ea-46db-9d48-abb0d8d35927/volumes"
Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.966276 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"]
Oct 01 06:27:42 crc kubenswrapper[4661]: E1001 06:27:42.968190 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="extract-utilities"
podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="extract-utilities" Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.968226 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="extract-utilities" Oct 01 06:27:42 crc kubenswrapper[4661]: E1001 06:27:42.968270 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="registry-server" Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.968286 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="registry-server" Oct 01 06:27:42 crc kubenswrapper[4661]: E1001 06:27:42.968322 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="extract-content" Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.968339 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="extract-content" Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.968886 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="3edc6f1a-02ea-46db-9d48-abb0d8d35927" containerName="registry-server" Oct 01 06:27:42 crc kubenswrapper[4661]: I1001 06:27:42.973056 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.015304 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"] Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.039078 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.039183 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.039323 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6tt7\" (UniqueName: \"kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.140944 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.141023 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content\") pod 
\"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.141147 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6tt7\" (UniqueName: \"kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.141812 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.142762 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.168698 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6tt7\" (UniqueName: \"kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7\") pod \"certified-operators-vxrgt\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.322404 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.803096 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"] Oct 01 06:27:43 crc kubenswrapper[4661]: I1001 06:27:43.996983 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerStarted","Data":"25c078bbfe91097002b7bed85b8aa45d9b032eb12d38eecd68aae0a6989466b2"} Oct 01 06:27:45 crc kubenswrapper[4661]: I1001 06:27:45.014338 4661 generic.go:334] "Generic (PLEG): container finished" podID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerID="6d124e3e56e4bf7257f257b847538c38c4a1e0854fec623a9c2cecdaf2bab741" exitCode=0 Oct 01 06:27:45 crc kubenswrapper[4661]: I1001 06:27:45.014822 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerDied","Data":"6d124e3e56e4bf7257f257b847538c38c4a1e0854fec623a9c2cecdaf2bab741"} Oct 01 06:27:46 crc kubenswrapper[4661]: I1001 06:27:46.030502 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerStarted","Data":"0d25a3737f2c96bcf3f74ff6b8a1d7d2e80754356fa7fcd73a041cc6d8b44aa4"} Oct 01 06:27:48 crc kubenswrapper[4661]: I1001 06:27:48.057468 4661 generic.go:334] "Generic (PLEG): container finished" podID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerID="0d25a3737f2c96bcf3f74ff6b8a1d7d2e80754356fa7fcd73a041cc6d8b44aa4" exitCode=0 Oct 01 06:27:48 crc kubenswrapper[4661]: I1001 06:27:48.057554 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerDied","Data":"0d25a3737f2c96bcf3f74ff6b8a1d7d2e80754356fa7fcd73a041cc6d8b44aa4"} Oct 01 06:27:49 crc kubenswrapper[4661]: I1001 06:27:49.070807 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerStarted","Data":"d2936e7746f24e020e0136fc7236b3e0d4ec03194461d4da70c7a8d72e0ca576"} Oct 01 06:27:49 crc kubenswrapper[4661]: I1001 06:27:49.100876 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vxrgt" podStartSLOduration=3.630130163 podStartE2EDuration="7.100859489s" podCreationTimestamp="2025-10-01 06:27:42 +0000 UTC" firstStartedPulling="2025-10-01 06:27:45.017416377 +0000 UTC m=+3513.955395041" lastFinishedPulling="2025-10-01 06:27:48.488145753 +0000 UTC m=+3517.426124367" observedRunningTime="2025-10-01 06:27:49.093364014 +0000 UTC m=+3518.031342668" watchObservedRunningTime="2025-10-01 06:27:49.100859489 +0000 UTC m=+3518.038838103" Oct 01 06:27:53 crc kubenswrapper[4661]: I1001 06:27:53.324006 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:53 crc kubenswrapper[4661]: I1001 06:27:53.325153 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:53 crc kubenswrapper[4661]: I1001 06:27:53.392508 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:54 crc kubenswrapper[4661]: I1001 06:27:54.192289 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:54 crc kubenswrapper[4661]: I1001 06:27:54.255212 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"] Oct 01 06:27:56 crc kubenswrapper[4661]: I1001 06:27:56.150333 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vxrgt" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="registry-server" containerID="cri-o://d2936e7746f24e020e0136fc7236b3e0d4ec03194461d4da70c7a8d72e0ca576" gracePeriod=2 Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.163219 4661 generic.go:334] "Generic (PLEG): container finished" podID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerID="d2936e7746f24e020e0136fc7236b3e0d4ec03194461d4da70c7a8d72e0ca576" exitCode=0 Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.163333 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerDied","Data":"d2936e7746f24e020e0136fc7236b3e0d4ec03194461d4da70c7a8d72e0ca576"} Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.865285 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vxrgt" Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.973152 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities\") pod \"6742a589-c16e-4504-8b5c-28b13d61e9d3\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.973428 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6tt7\" (UniqueName: \"kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7\") pod \"6742a589-c16e-4504-8b5c-28b13d61e9d3\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.973504 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content\") pod \"6742a589-c16e-4504-8b5c-28b13d61e9d3\" (UID: \"6742a589-c16e-4504-8b5c-28b13d61e9d3\") " Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.974037 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities" (OuterVolumeSpecName: "utilities") pod "6742a589-c16e-4504-8b5c-28b13d61e9d3" (UID: "6742a589-c16e-4504-8b5c-28b13d61e9d3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.974489 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:57 crc kubenswrapper[4661]: I1001 06:27:57.985127 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7" (OuterVolumeSpecName: "kube-api-access-d6tt7") pod "6742a589-c16e-4504-8b5c-28b13d61e9d3" (UID: "6742a589-c16e-4504-8b5c-28b13d61e9d3"). InnerVolumeSpecName "kube-api-access-d6tt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.021216 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6742a589-c16e-4504-8b5c-28b13d61e9d3" (UID: "6742a589-c16e-4504-8b5c-28b13d61e9d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.076447 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6742a589-c16e-4504-8b5c-28b13d61e9d3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.076479 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6tt7\" (UniqueName: \"kubernetes.io/projected/6742a589-c16e-4504-8b5c-28b13d61e9d3-kube-api-access-d6tt7\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.179458 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vxrgt" event={"ID":"6742a589-c16e-4504-8b5c-28b13d61e9d3","Type":"ContainerDied","Data":"25c078bbfe91097002b7bed85b8aa45d9b032eb12d38eecd68aae0a6989466b2"} Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.179512 4661 scope.go:117] "RemoveContainer" containerID="d2936e7746f24e020e0136fc7236b3e0d4ec03194461d4da70c7a8d72e0ca576" Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.179540 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vxrgt"
Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.220876 4661 scope.go:117] "RemoveContainer" containerID="0d25a3737f2c96bcf3f74ff6b8a1d7d2e80754356fa7fcd73a041cc6d8b44aa4"
Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.235778 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"]
Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.243610 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vxrgt"]
Oct 01 06:27:58 crc kubenswrapper[4661]: I1001 06:27:58.247946 4661 scope.go:117] "RemoveContainer" containerID="6d124e3e56e4bf7257f257b847538c38c4a1e0854fec623a9c2cecdaf2bab741"
Oct 01 06:27:59 crc kubenswrapper[4661]: I1001 06:27:59.772029 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" path="/var/lib/kubelet/pods/6742a589-c16e-4504-8b5c-28b13d61e9d3/volumes"
Oct 01 06:28:04 crc kubenswrapper[4661]: I1001 06:28:04.309839 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:28:04 crc kubenswrapper[4661]: I1001 06:28:04.310415 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:28:34 crc kubenswrapper[4661]: I1001 06:28:34.308934 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:28:34 crc kubenswrapper[4661]: I1001 06:28:34.309752 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.309553 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.310154 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.310193 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
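
The patch_prober/prober entries above show kubelet's HTTP liveness probe for machine-config-daemon failing once every 30 seconds with connection refused (06:28:04, 06:28:34, 06:29:04), after which the SyncLoop (probe) entry marks the container unhealthy; three consecutive failures is the usual default failureThreshold. A minimal Go sketch of an equivalent HTTP health check follows, assuming the /health endpoint on port 8798 taken from the log lines; it is an illustration of the probe semantics, not kubelet's prober implementation.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP mimics the shape of an HTTP liveness check: any transport
// error (e.g. "connect: connection refused") or a status code outside
// the 200-399 range is treated as a probe failure.
func probeHTTP(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err) // transport-level failure
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the log entries above.
	if err := probeHTTP("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("unhealthy:", err)
	} else {
		fmt.Println("healthy")
	}
}
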
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.310726 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.310777 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" gracePeriod=600
Oct 01 06:29:04 crc kubenswrapper[4661]: E1001 06:29:04.472007 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.961691 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" exitCode=0
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.962056 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"}
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.962094 4661 scope.go:117] "RemoveContainer" containerID="ee8b9b6ed03e4eeb9e88b6d3ab66a07f435540b1756267f19970906565ed843e"
Oct 01 06:29:04 crc kubenswrapper[4661]: I1001 06:29:04.962859 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"
Oct 01 06:29:04 crc kubenswrapper[4661]: E1001 06:29:04.963148 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:29:19 crc kubenswrapper[4661]: I1001 06:29:19.757656 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"
Oct 01 06:29:19 crc kubenswrapper[4661]: E1001 06:29:19.758522 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:29:33 crc kubenswrapper[4661]: I1001 06:29:33.758421 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01
06:29:33 crc kubenswrapper[4661]: E1001 06:29:33.759533 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:29:44 crc kubenswrapper[4661]: I1001 06:29:44.757442 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:29:44 crc kubenswrapper[4661]: E1001 06:29:44.760027 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:29:59 crc kubenswrapper[4661]: I1001 06:29:59.758832 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:29:59 crc kubenswrapper[4661]: E1001 06:29:59.759957 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.188290 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw"] Oct 01 06:30:00 crc kubenswrapper[4661]: E1001 06:30:00.188831 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="extract-content" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.188853 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="extract-content" Oct 01 06:30:00 crc kubenswrapper[4661]: E1001 06:30:00.188868 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="extract-utilities" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.188877 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="extract-utilities" Oct 01 06:30:00 crc kubenswrapper[4661]: E1001 06:30:00.188906 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="registry-server" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.188915 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="registry-server" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.189175 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6742a589-c16e-4504-8b5c-28b13d61e9d3" containerName="registry-server" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.190163 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.196300 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.198316 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.210648 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw"] Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.269787 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt7pz\" (UniqueName: \"kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.269836 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.270153 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.372805 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.373059 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt7pz\" (UniqueName: \"kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.373151 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.375214 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume\") pod 
\"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.385737 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.392388 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt7pz\" (UniqueName: \"kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz\") pod \"collect-profiles-29321670-vllbw\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.513142 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:00 crc kubenswrapper[4661]: I1001 06:30:00.994351 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw"] Oct 01 06:30:01 crc kubenswrapper[4661]: I1001 06:30:01.614561 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" event={"ID":"bc3c5e7f-a131-4f27-8c62-57083890017a","Type":"ContainerStarted","Data":"11af60f107021aa9135b7cbe5117d6777ef7cedbf28bde0bb12107b47cd0a000"} Oct 01 06:30:01 crc kubenswrapper[4661]: I1001 06:30:01.614994 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" event={"ID":"bc3c5e7f-a131-4f27-8c62-57083890017a","Type":"ContainerStarted","Data":"7a8f529507c2fd3c169bdca07b86266a3e5d5a980ad0dfd03a3aa4e91132c6c2"} Oct 01 06:30:01 crc kubenswrapper[4661]: I1001 06:30:01.634688 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" podStartSLOduration=1.634668338 podStartE2EDuration="1.634668338s" podCreationTimestamp="2025-10-01 06:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:30:01.629355943 +0000 UTC m=+3650.567334557" watchObservedRunningTime="2025-10-01 06:30:01.634668338 +0000 UTC m=+3650.572646962" Oct 01 06:30:02 crc kubenswrapper[4661]: I1001 06:30:02.625925 4661 generic.go:334] "Generic (PLEG): container finished" podID="bc3c5e7f-a131-4f27-8c62-57083890017a" containerID="11af60f107021aa9135b7cbe5117d6777ef7cedbf28bde0bb12107b47cd0a000" exitCode=0 Oct 01 06:30:02 crc kubenswrapper[4661]: I1001 06:30:02.626012 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" event={"ID":"bc3c5e7f-a131-4f27-8c62-57083890017a","Type":"ContainerDied","Data":"11af60f107021aa9135b7cbe5117d6777ef7cedbf28bde0bb12107b47cd0a000"} Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.007047 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.050912 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt7pz\" (UniqueName: \"kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz\") pod \"bc3c5e7f-a131-4f27-8c62-57083890017a\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.051193 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume\") pod \"bc3c5e7f-a131-4f27-8c62-57083890017a\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.051279 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume\") pod \"bc3c5e7f-a131-4f27-8c62-57083890017a\" (UID: \"bc3c5e7f-a131-4f27-8c62-57083890017a\") " Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.052061 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume" (OuterVolumeSpecName: "config-volume") pod "bc3c5e7f-a131-4f27-8c62-57083890017a" (UID: "bc3c5e7f-a131-4f27-8c62-57083890017a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.058486 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bc3c5e7f-a131-4f27-8c62-57083890017a" (UID: "bc3c5e7f-a131-4f27-8c62-57083890017a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.058702 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz" (OuterVolumeSpecName: "kube-api-access-gt7pz") pod "bc3c5e7f-a131-4f27-8c62-57083890017a" (UID: "bc3c5e7f-a131-4f27-8c62-57083890017a"). InnerVolumeSpecName "kube-api-access-gt7pz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.154560 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt7pz\" (UniqueName: \"kubernetes.io/projected/bc3c5e7f-a131-4f27-8c62-57083890017a-kube-api-access-gt7pz\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.155051 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc3c5e7f-a131-4f27-8c62-57083890017a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.155077 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bc3c5e7f-a131-4f27-8c62-57083890017a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.656217 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" event={"ID":"bc3c5e7f-a131-4f27-8c62-57083890017a","Type":"ContainerDied","Data":"7a8f529507c2fd3c169bdca07b86266a3e5d5a980ad0dfd03a3aa4e91132c6c2"} Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.656505 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a8f529507c2fd3c169bdca07b86266a3e5d5a980ad0dfd03a3aa4e91132c6c2" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.656805 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw" Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.722753 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v"] Oct 01 06:30:04 crc kubenswrapper[4661]: I1001 06:30:04.733514 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321625-jtv2v"] Oct 01 06:30:05 crc kubenswrapper[4661]: I1001 06:30:05.795179 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8716dca9-1210-480b-b460-e41071589e9d" path="/var/lib/kubelet/pods/8716dca9-1210-480b-b460-e41071589e9d/volumes" Oct 01 06:30:14 crc kubenswrapper[4661]: I1001 06:30:14.758047 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:30:14 crc kubenswrapper[4661]: E1001 06:30:14.758959 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:30:23 crc kubenswrapper[4661]: I1001 06:30:23.526853 4661 scope.go:117] "RemoveContainer" containerID="68cf0ad20777a84bfa7e08c8f0e2e1e30ed1125df8e0fab7e1d96f14773a18e5" Oct 01 06:30:28 crc kubenswrapper[4661]: I1001 06:30:28.757291 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:30:28 crc kubenswrapper[4661]: E1001 06:30:28.758291 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:30:42 crc kubenswrapper[4661]: I1001 06:30:42.757726 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:30:42 crc kubenswrapper[4661]: E1001 06:30:42.758955 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:30:55 crc kubenswrapper[4661]: I1001 06:30:55.757144 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:30:55 crc kubenswrapper[4661]: E1001 06:30:55.757963 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:31:07 crc kubenswrapper[4661]: I1001 06:31:07.758766 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:31:07 crc kubenswrapper[4661]: E1001 06:31:07.759888 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:31:19 crc kubenswrapper[4661]: I1001 06:31:19.758311 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:31:19 crc kubenswrapper[4661]: E1001 06:31:19.759919 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:31:34 crc kubenswrapper[4661]: I1001 06:31:34.756831 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:31:34 crc kubenswrapper[4661]: E1001 06:31:34.759182 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:31:48 crc kubenswrapper[4661]: I1001 06:31:48.757906 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:31:48 crc kubenswrapper[4661]: E1001 06:31:48.759139 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:32:02 crc kubenswrapper[4661]: I1001 06:32:02.757321 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:32:02 crc kubenswrapper[4661]: E1001 06:32:02.758224 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:32:13 crc kubenswrapper[4661]: I1001 06:32:13.757480 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:32:13 crc kubenswrapper[4661]: E1001 06:32:13.758922 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:32:27 crc kubenswrapper[4661]: I1001 06:32:27.757668 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:32:27 crc kubenswrapper[4661]: E1001 06:32:27.758499 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:32:39 crc kubenswrapper[4661]: I1001 06:32:39.757404 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:32:39 crc kubenswrapper[4661]: E1001 06:32:39.758412 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:32:53 crc kubenswrapper[4661]: I1001 06:32:53.757870 4661 
scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:32:53 crc kubenswrapper[4661]: E1001 06:32:53.759193 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:33:06 crc kubenswrapper[4661]: I1001 06:33:06.756506 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:33:06 crc kubenswrapper[4661]: E1001 06:33:06.757518 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:33:17 crc kubenswrapper[4661]: I1001 06:33:17.758494 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:33:17 crc kubenswrapper[4661]: E1001 06:33:17.759219 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:33:31 crc kubenswrapper[4661]: I1001 06:33:31.772166 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:33:31 crc kubenswrapper[4661]: E1001 06:33:31.773470 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:33:45 crc kubenswrapper[4661]: I1001 06:33:45.758012 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:33:45 crc kubenswrapper[4661]: E1001 06:33:45.758758 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:33:56 crc kubenswrapper[4661]: I1001 06:33:56.757567 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:33:56 crc kubenswrapper[4661]: E1001 06:33:56.758224 4661 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:34:07 crc kubenswrapper[4661]: I1001 06:34:07.758403 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e"
Oct 01 06:34:08 crc kubenswrapper[4661]: I1001 06:34:08.504709 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6"}
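
The run of "Error syncing pod ... CrashLoopBackOff: \"back-off 5m0s restarting failed container\"" entries between 06:29:04 and 06:33:56 is the kubelet refusing each sync attempt until the container's restart backoff expires; once it does, the container is recreated (the ContainerStarted event at 06:34:08, roughly five minutes after the kill). A short Go sketch of the exponential backoff shape follows, assuming the commonly cited kubelet defaults of a 10s initial delay doubling up to a 5m cap; the exact values are configuration-dependent and are an assumption here, not read from this cluster.

package main

import (
	"fmt"
	"time"
)

// containerBackoff sketches a kubelet-style restart backoff: start at
// an initial delay, double on each subsequent restart, and saturate
// at a maximum. Values are assumed defaults (10s initial, 5m cap).
func containerBackoff(restarts int) time.Duration {
	const (
		initial = 10 * time.Second
		max     = 5 * time.Minute
	)
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, containerBackoff(r))
	}
	// After about five consecutive restarts the delay saturates at
	// 5m0s, which is why every retry in the window above reports the
	// same "back-off 5m0s" message.
}
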
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.706889 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5hsft"]
Oct 01 06:35:44 crc kubenswrapper[4661]: E1001 06:35:44.707984 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3c5e7f-a131-4f27-8c62-57083890017a" containerName="collect-profiles"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.708016 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3c5e7f-a131-4f27-8c62-57083890017a" containerName="collect-profiles"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.708289 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc3c5e7f-a131-4f27-8c62-57083890017a" containerName="collect-profiles"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.710257 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.721646 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5hsft"]
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.824252 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxqz8\" (UniqueName: \"kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.824330 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.824510 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.926117 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxqz8\" (UniqueName: \"kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.926209 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.926244 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.926840 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.926977 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft"
Oct 01 06:35:44 crc kubenswrapper[4661]: I1001 06:35:44.945894 4661 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-cxqz8\" (UniqueName: \"kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8\") pod \"community-operators-5hsft\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:45 crc kubenswrapper[4661]: I1001 06:35:45.032235 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:45 crc kubenswrapper[4661]: I1001 06:35:45.598210 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5hsft"] Oct 01 06:35:45 crc kubenswrapper[4661]: I1001 06:35:45.652221 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerStarted","Data":"cc5c0ba4fb104962228cc6f101067514e16ae6aa74d3243dca31a81c2c686c43"} Oct 01 06:35:46 crc kubenswrapper[4661]: I1001 06:35:46.664117 4661 generic.go:334] "Generic (PLEG): container finished" podID="199386f9-50aa-413a-960c-6fc4da967d16" containerID="acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418" exitCode=0 Oct 01 06:35:46 crc kubenswrapper[4661]: I1001 06:35:46.664216 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerDied","Data":"acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418"} Oct 01 06:35:46 crc kubenswrapper[4661]: I1001 06:35:46.668111 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:35:49 crc kubenswrapper[4661]: I1001 06:35:49.704166 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerStarted","Data":"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6"} Oct 01 06:35:50 crc kubenswrapper[4661]: I1001 06:35:50.725426 4661 generic.go:334] "Generic (PLEG): container finished" podID="199386f9-50aa-413a-960c-6fc4da967d16" containerID="319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6" exitCode=0 Oct 01 06:35:50 crc kubenswrapper[4661]: I1001 06:35:50.725518 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerDied","Data":"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6"} Oct 01 06:35:51 crc kubenswrapper[4661]: I1001 06:35:51.741963 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerStarted","Data":"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41"} Oct 01 06:35:51 crc kubenswrapper[4661]: I1001 06:35:51.764808 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5hsft" podStartSLOduration=3.192493817 podStartE2EDuration="7.764786811s" podCreationTimestamp="2025-10-01 06:35:44 +0000 UTC" firstStartedPulling="2025-10-01 06:35:46.667854299 +0000 UTC m=+3995.605832923" lastFinishedPulling="2025-10-01 06:35:51.240147243 +0000 UTC m=+4000.178125917" observedRunningTime="2025-10-01 06:35:51.761497261 +0000 UTC m=+4000.699475885" watchObservedRunningTime="2025-10-01 
06:35:51.764786811 +0000 UTC m=+4000.702765435" Oct 01 06:35:55 crc kubenswrapper[4661]: I1001 06:35:55.032975 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:55 crc kubenswrapper[4661]: I1001 06:35:55.034526 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:55 crc kubenswrapper[4661]: I1001 06:35:55.087246 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:56 crc kubenswrapper[4661]: I1001 06:35:56.846699 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:56 crc kubenswrapper[4661]: I1001 06:35:56.921681 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5hsft"] Oct 01 06:35:58 crc kubenswrapper[4661]: I1001 06:35:58.822046 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5hsft" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="registry-server" containerID="cri-o://f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41" gracePeriod=2 Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.380014 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.454744 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxqz8\" (UniqueName: \"kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8\") pod \"199386f9-50aa-413a-960c-6fc4da967d16\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.454860 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content\") pod \"199386f9-50aa-413a-960c-6fc4da967d16\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.454967 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities\") pod \"199386f9-50aa-413a-960c-6fc4da967d16\" (UID: \"199386f9-50aa-413a-960c-6fc4da967d16\") " Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.456439 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities" (OuterVolumeSpecName: "utilities") pod "199386f9-50aa-413a-960c-6fc4da967d16" (UID: "199386f9-50aa-413a-960c-6fc4da967d16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.465079 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8" (OuterVolumeSpecName: "kube-api-access-cxqz8") pod "199386f9-50aa-413a-960c-6fc4da967d16" (UID: "199386f9-50aa-413a-960c-6fc4da967d16"). InnerVolumeSpecName "kube-api-access-cxqz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.522558 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "199386f9-50aa-413a-960c-6fc4da967d16" (UID: "199386f9-50aa-413a-960c-6fc4da967d16"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.557706 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxqz8\" (UniqueName: \"kubernetes.io/projected/199386f9-50aa-413a-960c-6fc4da967d16-kube-api-access-cxqz8\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.557741 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.557754 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/199386f9-50aa-413a-960c-6fc4da967d16-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.853382 4661 generic.go:334] "Generic (PLEG): container finished" podID="199386f9-50aa-413a-960c-6fc4da967d16" containerID="f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41" exitCode=0 Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.853445 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerDied","Data":"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41"} Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.853501 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hsft" event={"ID":"199386f9-50aa-413a-960c-6fc4da967d16","Type":"ContainerDied","Data":"cc5c0ba4fb104962228cc6f101067514e16ae6aa74d3243dca31a81c2c686c43"} Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.853539 4661 scope.go:117] "RemoveContainer" containerID="f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.854863 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5hsft" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.875038 4661 scope.go:117] "RemoveContainer" containerID="319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.903277 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5hsft"] Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.908945 4661 scope.go:117] "RemoveContainer" containerID="acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.915621 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5hsft"] Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.972958 4661 scope.go:117] "RemoveContainer" containerID="f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41" Oct 01 06:35:59 crc kubenswrapper[4661]: E1001 06:35:59.973402 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41\": container with ID starting with f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41 not found: ID does not exist" containerID="f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.973440 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41"} err="failed to get container status \"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41\": rpc error: code = NotFound desc = could not find container \"f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41\": container with ID starting with f91644c555112fbd27e79b6fe2361ec0a0e64be1a8cee6933f39bc1d918f0a41 not found: ID does not exist" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.973466 4661 scope.go:117] "RemoveContainer" containerID="319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6" Oct 01 06:35:59 crc kubenswrapper[4661]: E1001 06:35:59.973990 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6\": container with ID starting with 319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6 not found: ID does not exist" containerID="319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.974020 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6"} err="failed to get container status \"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6\": rpc error: code = NotFound desc = could not find container \"319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6\": container with ID starting with 319cd1493291bcc2990ef590f3780879ebf1fb51e7f4286869586712fe0671e6 not found: ID does not exist" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.974039 4661 scope.go:117] "RemoveContainer" containerID="acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418" Oct 01 06:35:59 crc kubenswrapper[4661]: E1001 06:35:59.974322 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418\": container with ID starting with acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418 not found: ID does not exist" containerID="acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418" Oct 01 06:35:59 crc kubenswrapper[4661]: I1001 06:35:59.974348 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418"} err="failed to get container status \"acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418\": rpc error: code = NotFound desc = could not find container \"acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418\": container with ID starting with acb7464a655a405a36938b716e0cf43fef566c2145ef4abd5332035de9acf418 not found: ID does not exist" Oct 01 06:36:01 crc kubenswrapper[4661]: I1001 06:36:01.778367 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199386f9-50aa-413a-960c-6fc4da967d16" path="/var/lib/kubelet/pods/199386f9-50aa-413a-960c-6fc4da967d16/volumes" Oct 01 06:36:34 crc kubenswrapper[4661]: I1001 06:36:34.309301 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:36:34 crc kubenswrapper[4661]: I1001 06:36:34.309802 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:37:04 crc kubenswrapper[4661]: I1001 06:37:04.308845 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:37:04 crc kubenswrapper[4661]: I1001 06:37:04.309748 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.904771 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:21 crc kubenswrapper[4661]: E1001 06:37:21.906685 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="registry-server" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.906713 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="registry-server" Oct 01 06:37:21 crc kubenswrapper[4661]: E1001 06:37:21.906759 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="extract-content" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.906772 4661 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="extract-content" Oct 01 06:37:21 crc kubenswrapper[4661]: E1001 06:37:21.906797 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="extract-utilities" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.906810 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="extract-utilities" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.907362 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="199386f9-50aa-413a-960c-6fc4da967d16" containerName="registry-server" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.910429 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.940092 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.965161 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb9f2\" (UniqueName: \"kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.965261 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:21 crc kubenswrapper[4661]: I1001 06:37:21.965348 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.066996 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.067134 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb9f2\" (UniqueName: \"kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.067217 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.067697 4661 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.067774 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.086501 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb9f2\" (UniqueName: \"kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2\") pod \"redhat-operators-gvczn\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.240396 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.712083 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:22 crc kubenswrapper[4661]: I1001 06:37:22.816197 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerStarted","Data":"7f914628b4d88973ad92f5793a22c1a5c8bd3747c64843e0b39bde78345bf7b0"} Oct 01 06:37:23 crc kubenswrapper[4661]: I1001 06:37:23.831926 4661 generic.go:334] "Generic (PLEG): container finished" podID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerID="5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059" exitCode=0 Oct 01 06:37:23 crc kubenswrapper[4661]: I1001 06:37:23.832026 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerDied","Data":"5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059"} Oct 01 06:37:25 crc kubenswrapper[4661]: I1001 06:37:25.884158 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerStarted","Data":"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49"} Oct 01 06:37:29 crc kubenswrapper[4661]: I1001 06:37:29.945146 4661 generic.go:334] "Generic (PLEG): container finished" podID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerID="641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49" exitCode=0 Oct 01 06:37:29 crc kubenswrapper[4661]: I1001 06:37:29.945804 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerDied","Data":"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49"} Oct 01 06:37:30 crc kubenswrapper[4661]: I1001 06:37:30.961154 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerStarted","Data":"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8"} 
Oct 01 06:37:30 crc kubenswrapper[4661]: I1001 06:37:30.997038 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gvczn" podStartSLOduration=3.46991499 podStartE2EDuration="9.997014025s" podCreationTimestamp="2025-10-01 06:37:21 +0000 UTC" firstStartedPulling="2025-10-01 06:37:23.836407032 +0000 UTC m=+4092.774385696" lastFinishedPulling="2025-10-01 06:37:30.363506107 +0000 UTC m=+4099.301484731" observedRunningTime="2025-10-01 06:37:30.9861911 +0000 UTC m=+4099.924169724" watchObservedRunningTime="2025-10-01 06:37:30.997014025 +0000 UTC m=+4099.934992659" Oct 01 06:37:32 crc kubenswrapper[4661]: I1001 06:37:32.240584 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:32 crc kubenswrapper[4661]: I1001 06:37:32.241931 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:33 crc kubenswrapper[4661]: I1001 06:37:33.296713 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gvczn" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" probeResult="failure" output=< Oct 01 06:37:33 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 06:37:33 crc kubenswrapper[4661]: > Oct 01 06:37:34 crc kubenswrapper[4661]: I1001 06:37:34.309032 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:37:34 crc kubenswrapper[4661]: I1001 06:37:34.309400 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:37:34 crc kubenswrapper[4661]: I1001 06:37:34.309467 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:37:34 crc kubenswrapper[4661]: I1001 06:37:34.310509 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:37:34 crc kubenswrapper[4661]: I1001 06:37:34.310619 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6" gracePeriod=600 Oct 01 06:37:35 crc kubenswrapper[4661]: I1001 06:37:35.000510 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6" exitCode=0 Oct 01 06:37:35 crc kubenswrapper[4661]: I1001 06:37:35.000609 4661 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6"} Oct 01 06:37:35 crc kubenswrapper[4661]: I1001 06:37:35.000981 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc"} Oct 01 06:37:35 crc kubenswrapper[4661]: I1001 06:37:35.001012 4661 scope.go:117] "RemoveContainer" containerID="411af41e4225690495dc44956dd0b93695d91c1e4786dc3149c00c368f03e34e" Oct 01 06:37:43 crc kubenswrapper[4661]: I1001 06:37:43.309563 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gvczn" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" probeResult="failure" output=< Oct 01 06:37:43 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 06:37:43 crc kubenswrapper[4661]: > Oct 01 06:37:52 crc kubenswrapper[4661]: I1001 06:37:52.699562 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:52 crc kubenswrapper[4661]: I1001 06:37:52.762879 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:53 crc kubenswrapper[4661]: I1001 06:37:53.078934 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:54 crc kubenswrapper[4661]: I1001 06:37:54.248258 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gvczn" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" containerID="cri-o://50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8" gracePeriod=2 Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.208516 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.258372 4661 generic.go:334] "Generic (PLEG): container finished" podID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerID="50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8" exitCode=0 Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.258413 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerDied","Data":"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8"} Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.258438 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gvczn" event={"ID":"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4","Type":"ContainerDied","Data":"7f914628b4d88973ad92f5793a22c1a5c8bd3747c64843e0b39bde78345bf7b0"} Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.258440 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gvczn" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.258457 4661 scope.go:117] "RemoveContainer" containerID="50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.279448 4661 scope.go:117] "RemoveContainer" containerID="641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.289371 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content\") pod \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.289487 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb9f2\" (UniqueName: \"kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2\") pod \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.289653 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities\") pod \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\" (UID: \"2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4\") " Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.291564 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities" (OuterVolumeSpecName: "utilities") pod "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" (UID: "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.292820 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.295863 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2" (OuterVolumeSpecName: "kube-api-access-nb9f2") pod "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" (UID: "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4"). InnerVolumeSpecName "kube-api-access-nb9f2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.308896 4661 scope.go:117] "RemoveContainer" containerID="5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.352097 4661 scope.go:117] "RemoveContainer" containerID="50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8" Oct 01 06:37:55 crc kubenswrapper[4661]: E1001 06:37:55.352458 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8\": container with ID starting with 50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8 not found: ID does not exist" containerID="50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.352534 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8"} err="failed to get container status \"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8\": rpc error: code = NotFound desc = could not find container \"50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8\": container with ID starting with 50efbe44832a0cada49d9da8aec3d4d3176d8affec7facafc02e8895cd97acb8 not found: ID does not exist" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.352581 4661 scope.go:117] "RemoveContainer" containerID="641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49" Oct 01 06:37:55 crc kubenswrapper[4661]: E1001 06:37:55.352962 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49\": container with ID starting with 641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49 not found: ID does not exist" containerID="641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.353005 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49"} err="failed to get container status \"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49\": rpc error: code = NotFound desc = could not find container \"641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49\": container with ID starting with 641ffd214199358204e4718f6247e1943a7525e7638e7449ad7ee0e573c0ba49 not found: ID does not exist" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.353031 4661 scope.go:117] "RemoveContainer" containerID="5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059" Oct 01 06:37:55 crc kubenswrapper[4661]: E1001 06:37:55.353301 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059\": container with ID starting with 5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059 not found: ID does not exist" containerID="5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.353345 4661 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059"} err="failed to get container status \"5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059\": rpc error: code = NotFound desc = could not find container \"5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059\": container with ID starting with 5154af7bd2f20964c975edb48d04d21dfaf520d7f060ad5ef4cba985e0816059 not found: ID does not exist" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.396282 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb9f2\" (UniqueName: \"kubernetes.io/projected/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-kube-api-access-nb9f2\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.402298 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" (UID: "2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.498502 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.608916 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.619499 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gvczn"] Oct 01 06:37:55 crc kubenswrapper[4661]: I1001 06:37:55.772292 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" path="/var/lib/kubelet/pods/2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4/volumes" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.227705 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:04 crc kubenswrapper[4661]: E1001 06:38:04.228988 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="extract-utilities" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.229011 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="extract-utilities" Oct 01 06:38:04 crc kubenswrapper[4661]: E1001 06:38:04.229040 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="extract-content" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.229053 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="extract-content" Oct 01 06:38:04 crc kubenswrapper[4661]: E1001 06:38:04.229150 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.229166 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.229869 4661 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2efbf9d6-e7d4-4e80-a2a7-f5717f0927a4" containerName="registry-server" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.232627 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.262623 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.398533 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.398746 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldwv4\" (UniqueName: \"kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.398900 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.500698 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldwv4\" (UniqueName: \"kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.500799 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.500967 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.501565 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.501590 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities\") pod \"redhat-marketplace-7ql6f\" (UID: 
\"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.520937 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldwv4\" (UniqueName: \"kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4\") pod \"redhat-marketplace-7ql6f\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:04 crc kubenswrapper[4661]: I1001 06:38:04.560590 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:05 crc kubenswrapper[4661]: I1001 06:38:05.090388 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:05 crc kubenswrapper[4661]: I1001 06:38:05.378004 4661 generic.go:334] "Generic (PLEG): container finished" podID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerID="d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774" exitCode=0 Oct 01 06:38:05 crc kubenswrapper[4661]: I1001 06:38:05.378314 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerDied","Data":"d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774"} Oct 01 06:38:05 crc kubenswrapper[4661]: I1001 06:38:05.378340 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerStarted","Data":"a7bdbb833b3886ca2eaee1b13a82dad65ba354f5af335b99960d9836b1f2874a"} Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.412865 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerStarted","Data":"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f"} Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.617440 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.621608 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.630393 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.745775 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.745942 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4hpn\" (UniqueName: \"kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.746011 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.847456 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.847659 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4hpn\" (UniqueName: \"kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.847760 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.848038 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.848386 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.926696 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x4hpn\" (UniqueName: \"kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn\") pod \"certified-operators-lcs8v\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:06 crc kubenswrapper[4661]: I1001 06:38:06.944221 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:07 crc kubenswrapper[4661]: I1001 06:38:07.423723 4661 generic.go:334] "Generic (PLEG): container finished" podID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerID="e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f" exitCode=0 Oct 01 06:38:07 crc kubenswrapper[4661]: I1001 06:38:07.424336 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerDied","Data":"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f"} Oct 01 06:38:07 crc kubenswrapper[4661]: I1001 06:38:07.432789 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:08 crc kubenswrapper[4661]: I1001 06:38:08.435686 4661 generic.go:334] "Generic (PLEG): container finished" podID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerID="ab8f156e01bfedd90e12070dbfa01880f489bdff044aa360295d3623a4dde7a0" exitCode=0 Oct 01 06:38:08 crc kubenswrapper[4661]: I1001 06:38:08.436129 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerDied","Data":"ab8f156e01bfedd90e12070dbfa01880f489bdff044aa360295d3623a4dde7a0"} Oct 01 06:38:08 crc kubenswrapper[4661]: I1001 06:38:08.436161 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerStarted","Data":"c2d011bb992a9a824f129cbf04f9ad60251b97a807243ba916fd275fd22ffb91"} Oct 01 06:38:08 crc kubenswrapper[4661]: I1001 06:38:08.443493 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerStarted","Data":"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097"} Oct 01 06:38:08 crc kubenswrapper[4661]: I1001 06:38:08.485986 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7ql6f" podStartSLOduration=1.8738826720000001 podStartE2EDuration="4.48596827s" podCreationTimestamp="2025-10-01 06:38:04 +0000 UTC" firstStartedPulling="2025-10-01 06:38:05.380610532 +0000 UTC m=+4134.318589146" lastFinishedPulling="2025-10-01 06:38:07.99269614 +0000 UTC m=+4136.930674744" observedRunningTime="2025-10-01 06:38:08.482206867 +0000 UTC m=+4137.420185501" watchObservedRunningTime="2025-10-01 06:38:08.48596827 +0000 UTC m=+4137.423946884" Oct 01 06:38:09 crc kubenswrapper[4661]: I1001 06:38:09.455773 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerStarted","Data":"63eeee878ba1f4a2bad2ca7f954e2314e8290c8ec2a09eb111aef38647d0b620"} Oct 01 06:38:10 crc kubenswrapper[4661]: I1001 06:38:10.473098 4661 generic.go:334] "Generic (PLEG): container finished" 
podID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerID="63eeee878ba1f4a2bad2ca7f954e2314e8290c8ec2a09eb111aef38647d0b620" exitCode=0 Oct 01 06:38:10 crc kubenswrapper[4661]: I1001 06:38:10.473600 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerDied","Data":"63eeee878ba1f4a2bad2ca7f954e2314e8290c8ec2a09eb111aef38647d0b620"} Oct 01 06:38:11 crc kubenswrapper[4661]: I1001 06:38:11.487070 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerStarted","Data":"ef9fa914b93a350b8c876848eed9139e4146d01770f9e9073861e378e3855efe"} Oct 01 06:38:11 crc kubenswrapper[4661]: I1001 06:38:11.520991 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lcs8v" podStartSLOduration=3.031745137 podStartE2EDuration="5.520970892s" podCreationTimestamp="2025-10-01 06:38:06 +0000 UTC" firstStartedPulling="2025-10-01 06:38:08.437959116 +0000 UTC m=+4137.375937740" lastFinishedPulling="2025-10-01 06:38:10.927184881 +0000 UTC m=+4139.865163495" observedRunningTime="2025-10-01 06:38:11.50917735 +0000 UTC m=+4140.447155984" watchObservedRunningTime="2025-10-01 06:38:11.520970892 +0000 UTC m=+4140.458949526" Oct 01 06:38:14 crc kubenswrapper[4661]: I1001 06:38:14.561389 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:14 crc kubenswrapper[4661]: I1001 06:38:14.562024 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:14 crc kubenswrapper[4661]: I1001 06:38:14.622489 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:15 crc kubenswrapper[4661]: I1001 06:38:15.618857 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:16 crc kubenswrapper[4661]: I1001 06:38:16.394438 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:16 crc kubenswrapper[4661]: I1001 06:38:16.945496 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:16 crc kubenswrapper[4661]: I1001 06:38:16.946004 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:17 crc kubenswrapper[4661]: I1001 06:38:17.006284 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:17 crc kubenswrapper[4661]: I1001 06:38:17.549109 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7ql6f" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="registry-server" containerID="cri-o://29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097" gracePeriod=2 Oct 01 06:38:17 crc kubenswrapper[4661]: I1001 06:38:17.619415 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.092234 4661 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.182790 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldwv4\" (UniqueName: \"kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4\") pod \"bf51bc76-18cc-4151-b638-1f7da4d45a80\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.182922 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content\") pod \"bf51bc76-18cc-4151-b638-1f7da4d45a80\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.183012 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities\") pod \"bf51bc76-18cc-4151-b638-1f7da4d45a80\" (UID: \"bf51bc76-18cc-4151-b638-1f7da4d45a80\") " Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.184719 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities" (OuterVolumeSpecName: "utilities") pod "bf51bc76-18cc-4151-b638-1f7da4d45a80" (UID: "bf51bc76-18cc-4151-b638-1f7da4d45a80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.191666 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4" (OuterVolumeSpecName: "kube-api-access-ldwv4") pod "bf51bc76-18cc-4151-b638-1f7da4d45a80" (UID: "bf51bc76-18cc-4151-b638-1f7da4d45a80"). InnerVolumeSpecName "kube-api-access-ldwv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.204264 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf51bc76-18cc-4151-b638-1f7da4d45a80" (UID: "bf51bc76-18cc-4151-b638-1f7da4d45a80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.286877 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.286938 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf51bc76-18cc-4151-b638-1f7da4d45a80-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.286959 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldwv4\" (UniqueName: \"kubernetes.io/projected/bf51bc76-18cc-4151-b638-1f7da4d45a80-kube-api-access-ldwv4\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.567528 4661 generic.go:334] "Generic (PLEG): container finished" podID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerID="29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097" exitCode=0 Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.567662 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerDied","Data":"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097"} Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.568096 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ql6f" event={"ID":"bf51bc76-18cc-4151-b638-1f7da4d45a80","Type":"ContainerDied","Data":"a7bdbb833b3886ca2eaee1b13a82dad65ba354f5af335b99960d9836b1f2874a"} Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.568139 4661 scope.go:117] "RemoveContainer" containerID="29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.567687 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ql6f" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.603399 4661 scope.go:117] "RemoveContainer" containerID="e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.614763 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.629318 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ql6f"] Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.643200 4661 scope.go:117] "RemoveContainer" containerID="d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.696842 4661 scope.go:117] "RemoveContainer" containerID="29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097" Oct 01 06:38:18 crc kubenswrapper[4661]: E1001 06:38:18.697251 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097\": container with ID starting with 29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097 not found: ID does not exist" containerID="29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.697297 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097"} err="failed to get container status \"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097\": rpc error: code = NotFound desc = could not find container \"29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097\": container with ID starting with 29e7ecd19486b5fb9509ef4348c72129d933eaed1c7ff5939bd0441777876097 not found: ID does not exist" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.697329 4661 scope.go:117] "RemoveContainer" containerID="e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f" Oct 01 06:38:18 crc kubenswrapper[4661]: E1001 06:38:18.697717 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f\": container with ID starting with e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f not found: ID does not exist" containerID="e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.697743 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f"} err="failed to get container status \"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f\": rpc error: code = NotFound desc = could not find container \"e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f\": container with ID starting with e92f79fd792f50e8ce17bdd798722241accd90c6812b0a88ef87d6c5f624cc6f not found: ID does not exist" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.697759 4661 scope.go:117] "RemoveContainer" containerID="d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774" Oct 01 06:38:18 crc kubenswrapper[4661]: E1001 06:38:18.698003 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774\": container with ID starting with d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774 not found: ID does not exist" containerID="d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774" Oct 01 06:38:18 crc kubenswrapper[4661]: I1001 06:38:18.698028 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774"} err="failed to get container status \"d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774\": rpc error: code = NotFound desc = could not find container \"d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774\": container with ID starting with d5af7cefafc4ba68acd8e2c8091c5b322940de57a9dec841ab1622f587475774 not found: ID does not exist" Oct 01 06:38:19 crc kubenswrapper[4661]: I1001 06:38:19.404250 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:19 crc kubenswrapper[4661]: I1001 06:38:19.581911 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lcs8v" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="registry-server" containerID="cri-o://ef9fa914b93a350b8c876848eed9139e4146d01770f9e9073861e378e3855efe" gracePeriod=2 Oct 01 06:38:19 crc kubenswrapper[4661]: I1001 06:38:19.772727 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" path="/var/lib/kubelet/pods/bf51bc76-18cc-4151-b638-1f7da4d45a80/volumes" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.592730 4661 generic.go:334] "Generic (PLEG): container finished" podID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerID="ef9fa914b93a350b8c876848eed9139e4146d01770f9e9073861e378e3855efe" exitCode=0 Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.592816 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerDied","Data":"ef9fa914b93a350b8c876848eed9139e4146d01770f9e9073861e378e3855efe"} Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.593026 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcs8v" event={"ID":"747cb074-9efd-4fc8-8bef-03cb3a42ff81","Type":"ContainerDied","Data":"c2d011bb992a9a824f129cbf04f9ad60251b97a807243ba916fd275fd22ffb91"} Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.593044 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2d011bb992a9a824f129cbf04f9ad60251b97a807243ba916fd275fd22ffb91" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.609241 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.636082 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities\") pod \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.636137 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content\") pod \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.636167 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4hpn\" (UniqueName: \"kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn\") pod \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\" (UID: \"747cb074-9efd-4fc8-8bef-03cb3a42ff81\") " Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.636980 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities" (OuterVolumeSpecName: "utilities") pod "747cb074-9efd-4fc8-8bef-03cb3a42ff81" (UID: "747cb074-9efd-4fc8-8bef-03cb3a42ff81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.643880 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn" (OuterVolumeSpecName: "kube-api-access-x4hpn") pod "747cb074-9efd-4fc8-8bef-03cb3a42ff81" (UID: "747cb074-9efd-4fc8-8bef-03cb3a42ff81"). InnerVolumeSpecName "kube-api-access-x4hpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.682744 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "747cb074-9efd-4fc8-8bef-03cb3a42ff81" (UID: "747cb074-9efd-4fc8-8bef-03cb3a42ff81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.738608 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.738674 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/747cb074-9efd-4fc8-8bef-03cb3a42ff81-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:20 crc kubenswrapper[4661]: I1001 06:38:20.738690 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4hpn\" (UniqueName: \"kubernetes.io/projected/747cb074-9efd-4fc8-8bef-03cb3a42ff81-kube-api-access-x4hpn\") on node \"crc\" DevicePath \"\"" Oct 01 06:38:21 crc kubenswrapper[4661]: I1001 06:38:21.603279 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lcs8v" Oct 01 06:38:21 crc kubenswrapper[4661]: I1001 06:38:21.644556 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:21 crc kubenswrapper[4661]: I1001 06:38:21.651203 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lcs8v"] Oct 01 06:38:21 crc kubenswrapper[4661]: I1001 06:38:21.771985 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" path="/var/lib/kubelet/pods/747cb074-9efd-4fc8-8bef-03cb3a42ff81/volumes" Oct 01 06:39:34 crc kubenswrapper[4661]: I1001 06:39:34.310743 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:39:34 crc kubenswrapper[4661]: I1001 06:39:34.311156 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:40:04 crc kubenswrapper[4661]: I1001 06:40:04.309206 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:40:04 crc kubenswrapper[4661]: I1001 06:40:04.309853 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:40:34 crc kubenswrapper[4661]: I1001 06:40:34.309212 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:40:34 crc kubenswrapper[4661]: I1001 06:40:34.311768 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:40:34 crc kubenswrapper[4661]: I1001 06:40:34.311842 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 06:40:34 crc kubenswrapper[4661]: I1001 06:40:34.312806 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Oct 01 06:40:34 crc kubenswrapper[4661]: I1001 06:40:34.312885 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" gracePeriod=600 Oct 01 06:40:34 crc kubenswrapper[4661]: E1001 06:40:34.436362 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:40:35 crc kubenswrapper[4661]: I1001 06:40:35.203206 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" exitCode=0 Oct 01 06:40:35 crc kubenswrapper[4661]: I1001 06:40:35.204006 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc"} Oct 01 06:40:35 crc kubenswrapper[4661]: I1001 06:40:35.204375 4661 scope.go:117] "RemoveContainer" containerID="908374508ae1b7f8adc1f1b949ba4b52cd51d12cb39513293ca7c897d05509b6" Oct 01 06:40:35 crc kubenswrapper[4661]: I1001 06:40:35.205054 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:40:35 crc kubenswrapper[4661]: E1001 06:40:35.205504 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:40:48 crc kubenswrapper[4661]: I1001 06:40:48.756996 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:40:48 crc kubenswrapper[4661]: E1001 06:40:48.757848 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:41:02 crc kubenswrapper[4661]: I1001 06:41:02.758553 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:41:02 crc kubenswrapper[4661]: E1001 06:41:02.759891 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Oct 01 06:41:31 crc kubenswrapper[4661]: E1001 06:41:31.319540 4661 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.65:40334->38.102.83.65:34747: read tcp 38.102.83.65:40334->38.102.83.65:34747: read: connection reset by peer
Oct 01 06:41:35 crc kubenswrapper[4661]: E1001 06:41:35.255467 4661 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.65:40428->38.102.83.65:34747: write tcp 38.102.83.65:40428->38.102.83.65:34747: write: broken pipe
Oct 01 06:44:14 crc kubenswrapper[4661]: I1001 06:44:14.757767 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc"
Oct 01 06:44:14 crc kubenswrapper[4661]: E1001 06:44:14.758984 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:44:24 crc kubenswrapper[4661]: I1001 06:44:24.021003 4661 scope.go:117] "RemoveContainer" containerID="ab8f156e01bfedd90e12070dbfa01880f489bdff044aa360295d3623a4dde7a0"
Oct 01 06:44:24 crc kubenswrapper[4661]: I1001 06:44:24.050544 4661 scope.go:117] "RemoveContainer" containerID="63eeee878ba1f4a2bad2ca7f954e2314e8290c8ec2a09eb111aef38647d0b620"
Oct 01 06:44:24 crc kubenswrapper[4661]: I1001 06:44:24.107548 4661 scope.go:117] "RemoveContainer" containerID="ef9fa914b93a350b8c876848eed9139e4146d01770f9e9073861e378e3855efe"
Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.155518 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5"]
Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156685 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="registry-server"
Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156704 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="registry-server"
Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156719 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80"
containerName="extract-utilities" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156727 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="extract-utilities" Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156747 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="registry-server" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156758 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="registry-server" Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156777 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="extract-content" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156784 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="extract-content" Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156819 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="extract-utilities" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156830 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="extract-utilities" Oct 01 06:45:00 crc kubenswrapper[4661]: E1001 06:45:00.156851 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="extract-content" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.156860 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="extract-content" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.157081 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf51bc76-18cc-4151-b638-1f7da4d45a80" containerName="registry-server" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.157113 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="747cb074-9efd-4fc8-8bef-03cb3a42ff81" containerName="registry-server" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.158028 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.160905 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.160913 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.165654 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5"] Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.262258 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tlw9\" (UniqueName: \"kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.262355 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.262495 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.364539 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.364709 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.364889 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tlw9\" (UniqueName: \"kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.365622 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume\") pod 
\"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.374622 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.388823 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tlw9\" (UniqueName: \"kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9\") pod \"collect-profiles-29321685-8cmh5\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:00 crc kubenswrapper[4661]: I1001 06:45:00.504712 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:01 crc kubenswrapper[4661]: I1001 06:45:01.002848 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5"] Oct 01 06:45:01 crc kubenswrapper[4661]: I1001 06:45:01.390828 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" event={"ID":"68d5e90d-0bfe-45c1-a990-073958706352","Type":"ContainerStarted","Data":"38a2fef062e8230e6c88f29ead1e497ba401a636c2cdfe0fb7ee8d62f76c8361"} Oct 01 06:45:01 crc kubenswrapper[4661]: I1001 06:45:01.391140 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" event={"ID":"68d5e90d-0bfe-45c1-a990-073958706352","Type":"ContainerStarted","Data":"0d0e8efbf96fe34136e5470b5570c83478c8e429632e7523c514d2b9fb2b55c8"} Oct 01 06:45:01 crc kubenswrapper[4661]: I1001 06:45:01.426699 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" podStartSLOduration=1.4266818909999999 podStartE2EDuration="1.426681891s" podCreationTimestamp="2025-10-01 06:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:45:01.413381077 +0000 UTC m=+4550.351359701" watchObservedRunningTime="2025-10-01 06:45:01.426681891 +0000 UTC m=+4550.364660505" Oct 01 06:45:02 crc kubenswrapper[4661]: I1001 06:45:02.408129 4661 generic.go:334] "Generic (PLEG): container finished" podID="68d5e90d-0bfe-45c1-a990-073958706352" containerID="38a2fef062e8230e6c88f29ead1e497ba401a636c2cdfe0fb7ee8d62f76c8361" exitCode=0 Oct 01 06:45:02 crc kubenswrapper[4661]: I1001 06:45:02.408238 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" event={"ID":"68d5e90d-0bfe-45c1-a990-073958706352","Type":"ContainerDied","Data":"38a2fef062e8230e6c88f29ead1e497ba401a636c2cdfe0fb7ee8d62f76c8361"} Oct 01 06:45:03 crc kubenswrapper[4661]: I1001 06:45:03.834991 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:03 crc kubenswrapper[4661]: I1001 06:45:03.955592 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume\") pod \"68d5e90d-0bfe-45c1-a990-073958706352\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " Oct 01 06:45:03 crc kubenswrapper[4661]: I1001 06:45:03.955699 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume\") pod \"68d5e90d-0bfe-45c1-a990-073958706352\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " Oct 01 06:45:03 crc kubenswrapper[4661]: I1001 06:45:03.955797 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tlw9\" (UniqueName: \"kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9\") pod \"68d5e90d-0bfe-45c1-a990-073958706352\" (UID: \"68d5e90d-0bfe-45c1-a990-073958706352\") " Oct 01 06:45:03 crc kubenswrapper[4661]: I1001 06:45:03.956201 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume" (OuterVolumeSpecName: "config-volume") pod "68d5e90d-0bfe-45c1-a990-073958706352" (UID: "68d5e90d-0bfe-45c1-a990-073958706352"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.058228 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68d5e90d-0bfe-45c1-a990-073958706352-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.421089 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9" (OuterVolumeSpecName: "kube-api-access-9tlw9") pod "68d5e90d-0bfe-45c1-a990-073958706352" (UID: "68d5e90d-0bfe-45c1-a990-073958706352"). InnerVolumeSpecName "kube-api-access-9tlw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.429332 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "68d5e90d-0bfe-45c1-a990-073958706352" (UID: "68d5e90d-0bfe-45c1-a990-073958706352"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.449857 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" event={"ID":"68d5e90d-0bfe-45c1-a990-073958706352","Type":"ContainerDied","Data":"0d0e8efbf96fe34136e5470b5570c83478c8e429632e7523c514d2b9fb2b55c8"} Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.449913 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d0e8efbf96fe34136e5470b5570c83478c8e429632e7523c514d2b9fb2b55c8" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.449997 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-8cmh5" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.467945 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68d5e90d-0bfe-45c1-a990-073958706352-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.468433 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tlw9\" (UniqueName: \"kubernetes.io/projected/68d5e90d-0bfe-45c1-a990-073958706352-kube-api-access-9tlw9\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.527876 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59"] Oct 01 06:45:04 crc kubenswrapper[4661]: I1001 06:45:04.541003 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321640-4lq59"] Oct 01 06:45:05 crc kubenswrapper[4661]: I1001 06:45:05.774173 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66df94f3-a98f-4546-912c-0784e65d770d" path="/var/lib/kubelet/pods/66df94f3-a98f-4546-912c-0784e65d770d/volumes" Oct 01 06:45:10 crc kubenswrapper[4661]: I1001 06:45:10.757504 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:45:10 crc kubenswrapper[4661]: E1001 06:45:10.760744 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:45:24 crc kubenswrapper[4661]: I1001 06:45:24.192348 4661 scope.go:117] "RemoveContainer" containerID="62a60e87fd8ed151c3f68d5758f214ec957850e034067ec3f1be5affeeb7c717" Oct 01 06:45:25 crc kubenswrapper[4661]: I1001 06:45:25.757679 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:45:25 crc kubenswrapper[4661]: E1001 06:45:25.758838 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:45:38 crc kubenswrapper[4661]: I1001 06:45:38.757540 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc" Oct 01 06:45:39 crc kubenswrapper[4661]: I1001 06:45:39.860076 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64"} Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.424866 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:45:58 crc kubenswrapper[4661]: E1001 
06:45:58.438171 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d5e90d-0bfe-45c1-a990-073958706352" containerName="collect-profiles" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.438195 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d5e90d-0bfe-45c1-a990-073958706352" containerName="collect-profiles" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.438679 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d5e90d-0bfe-45c1-a990-073958706352" containerName="collect-profiles" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.444117 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.444218 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.490937 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.491683 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.491785 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4skc2\" (UniqueName: \"kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.594755 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.595151 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.595435 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4skc2\" (UniqueName: \"kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.596023 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.596295 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.615791 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4skc2\" (UniqueName: \"kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2\") pod \"community-operators-rs4dm\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:58 crc kubenswrapper[4661]: I1001 06:45:58.801916 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:45:59 crc kubenswrapper[4661]: W1001 06:45:59.292473 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6abbfe0_86e4_4958_bf40_3bb51d3b40b1.slice/crio-ae51ef9ce499e356fa9213f65ab106f7ca4881fa7e85e804b2c8411a3f074dcf WatchSource:0}: Error finding container ae51ef9ce499e356fa9213f65ab106f7ca4881fa7e85e804b2c8411a3f074dcf: Status 404 returned error can't find the container with id ae51ef9ce499e356fa9213f65ab106f7ca4881fa7e85e804b2c8411a3f074dcf Oct 01 06:45:59 crc kubenswrapper[4661]: I1001 06:45:59.297212 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:46:00 crc kubenswrapper[4661]: I1001 06:46:00.096456 4661 generic.go:334] "Generic (PLEG): container finished" podID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerID="a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1" exitCode=0 Oct 01 06:46:00 crc kubenswrapper[4661]: I1001 06:46:00.096546 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerDied","Data":"a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1"} Oct 01 06:46:00 crc kubenswrapper[4661]: I1001 06:46:00.096745 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerStarted","Data":"ae51ef9ce499e356fa9213f65ab106f7ca4881fa7e85e804b2c8411a3f074dcf"} Oct 01 06:46:00 crc kubenswrapper[4661]: I1001 06:46:00.102071 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:46:02 crc kubenswrapper[4661]: I1001 06:46:02.122865 4661 generic.go:334] "Generic (PLEG): container finished" podID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerID="9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080" exitCode=0 Oct 01 06:46:02 crc kubenswrapper[4661]: I1001 06:46:02.123103 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerDied","Data":"9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080"} Oct 01 06:46:04 
crc kubenswrapper[4661]: I1001 06:46:04.149831 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerStarted","Data":"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331"} Oct 01 06:46:04 crc kubenswrapper[4661]: I1001 06:46:04.184854 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rs4dm" podStartSLOduration=2.7029192220000002 podStartE2EDuration="6.184835739s" podCreationTimestamp="2025-10-01 06:45:58 +0000 UTC" firstStartedPulling="2025-10-01 06:46:00.101830676 +0000 UTC m=+4609.039809290" lastFinishedPulling="2025-10-01 06:46:03.583747183 +0000 UTC m=+4612.521725807" observedRunningTime="2025-10-01 06:46:04.173343604 +0000 UTC m=+4613.111322248" watchObservedRunningTime="2025-10-01 06:46:04.184835739 +0000 UTC m=+4613.122814363" Oct 01 06:46:08 crc kubenswrapper[4661]: I1001 06:46:08.803337 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:08 crc kubenswrapper[4661]: I1001 06:46:08.804046 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:08 crc kubenswrapper[4661]: I1001 06:46:08.871521 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:09 crc kubenswrapper[4661]: I1001 06:46:09.291877 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:09 crc kubenswrapper[4661]: I1001 06:46:09.364911 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.227931 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rs4dm" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="registry-server" containerID="cri-o://a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331" gracePeriod=2 Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.803026 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.894548 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4skc2\" (UniqueName: \"kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2\") pod \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.894697 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities\") pod \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.894771 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content\") pod \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\" (UID: \"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1\") " Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.896245 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities" (OuterVolumeSpecName: "utilities") pod "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" (UID: "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.900788 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2" (OuterVolumeSpecName: "kube-api-access-4skc2") pod "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" (UID: "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1"). InnerVolumeSpecName "kube-api-access-4skc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.945386 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" (UID: "a6abbfe0-86e4-4958-bf40-3bb51d3b40b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.997168 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4skc2\" (UniqueName: \"kubernetes.io/projected/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-kube-api-access-4skc2\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.997207 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:11 crc kubenswrapper[4661]: I1001 06:46:11.997222 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.239923 4661 generic.go:334] "Generic (PLEG): container finished" podID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerID="a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331" exitCode=0 Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.239965 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerDied","Data":"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331"} Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.239993 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rs4dm" event={"ID":"a6abbfe0-86e4-4958-bf40-3bb51d3b40b1","Type":"ContainerDied","Data":"ae51ef9ce499e356fa9213f65ab106f7ca4881fa7e85e804b2c8411a3f074dcf"} Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.240041 4661 scope.go:117] "RemoveContainer" containerID="a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.240082 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rs4dm" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.565760 4661 scope.go:117] "RemoveContainer" containerID="9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.576614 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.600039 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rs4dm"] Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.606979 4661 scope.go:117] "RemoveContainer" containerID="a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.643337 4661 scope.go:117] "RemoveContainer" containerID="a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331" Oct 01 06:46:12 crc kubenswrapper[4661]: E1001 06:46:12.643982 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331\": container with ID starting with a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331 not found: ID does not exist" containerID="a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.644016 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331"} err="failed to get container status \"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331\": rpc error: code = NotFound desc = could not find container \"a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331\": container with ID starting with a294f4c18cc205161c2ec83a4ac8b7a497399cf36a031ef04aef3450f18fa331 not found: ID does not exist" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.644038 4661 scope.go:117] "RemoveContainer" containerID="9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080" Oct 01 06:46:12 crc kubenswrapper[4661]: E1001 06:46:12.644385 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080\": container with ID starting with 9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080 not found: ID does not exist" containerID="9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.644418 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080"} err="failed to get container status \"9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080\": rpc error: code = NotFound desc = could not find container \"9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080\": container with ID starting with 9aee0763f9adbed183011cac6155f40c7de61084453a3e8a484c83550c915080 not found: ID does not exist" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.644440 4661 scope.go:117] "RemoveContainer" containerID="a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1" Oct 01 06:46:12 crc kubenswrapper[4661]: E1001 06:46:12.644907 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1\": container with ID starting with a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1 not found: ID does not exist" containerID="a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1" Oct 01 06:46:12 crc kubenswrapper[4661]: I1001 06:46:12.644930 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1"} err="failed to get container status \"a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1\": rpc error: code = NotFound desc = could not find container \"a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1\": container with ID starting with a5045d3d757e1b8b7207d2ca2154114983d966ad1b2a85f9b6aad875f9da98a1 not found: ID does not exist" Oct 01 06:46:13 crc kubenswrapper[4661]: I1001 06:46:13.778544 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" path="/var/lib/kubelet/pods/a6abbfe0-86e4-4958-bf40-3bb51d3b40b1/volumes" Oct 01 06:48:04 crc kubenswrapper[4661]: I1001 06:48:04.309396 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:48:04 crc kubenswrapper[4661]: I1001 06:48:04.310005 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:48:34 crc kubenswrapper[4661]: I1001 06:48:34.309414 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:48:34 crc kubenswrapper[4661]: I1001 06:48:34.310019 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.975606 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"] Oct 01 06:48:35 crc kubenswrapper[4661]: E1001 06:48:35.976421 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="extract-utilities" Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.976438 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="extract-utilities" Oct 01 06:48:35 crc kubenswrapper[4661]: E1001 06:48:35.976467 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="registry-server" Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.976475 4661 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="registry-server"
Oct 01 06:48:35 crc kubenswrapper[4661]: E1001 06:48:35.976496 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="extract-content"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.976503 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="extract-content"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.976768 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6abbfe0-86e4-4958-bf40-3bb51d3b40b1" containerName="registry-server"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.978473 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.984386 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.984551 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.984577 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r72ch\" (UniqueName: \"kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:35 crc kubenswrapper[4661]: I1001 06:48:35.995586 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"]
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.086502 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.086543 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r72ch\" (UniqueName: \"kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.086656 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.087009 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.087071 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.120971 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r72ch\" (UniqueName: \"kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch\") pod \"redhat-marketplace-rv6p9\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") " pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.319181 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:36 crc kubenswrapper[4661]: I1001 06:48:36.809805 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"]
Oct 01 06:48:37 crc kubenswrapper[4661]: I1001 06:48:37.991816 4661 generic.go:334] "Generic (PLEG): container finished" podID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerID="6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3" exitCode=0
Oct 01 06:48:37 crc kubenswrapper[4661]: I1001 06:48:37.991940 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerDied","Data":"6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3"}
Oct 01 06:48:37 crc kubenswrapper[4661]: I1001 06:48:37.992333 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerStarted","Data":"86b6101b74e55fb7fe1c8f3ed248b89d28ae95009e50217f2b8f7e7ef3e65630"}
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.014304 4661 generic.go:334] "Generic (PLEG): container finished" podID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerID="bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8" exitCode=0
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.014428 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerDied","Data":"bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8"}
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.341459 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.346741 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.358653 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.471778 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8mpp\" (UniqueName: \"kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.471838 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.471918 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.573229 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8mpp\" (UniqueName: \"kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.573285 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.573368 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.573989 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.574139 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.599986 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8mpp\" (UniqueName: \"kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp\") pod \"certified-operators-72p97\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") " pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:40 crc kubenswrapper[4661]: I1001 06:48:40.710114 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:41 crc kubenswrapper[4661]: I1001 06:48:41.039099 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerStarted","Data":"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"}
Oct 01 06:48:41 crc kubenswrapper[4661]: I1001 06:48:41.068007 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rv6p9" podStartSLOduration=3.617768454 podStartE2EDuration="6.067988453s" podCreationTimestamp="2025-10-01 06:48:35 +0000 UTC" firstStartedPulling="2025-10-01 06:48:37.994079469 +0000 UTC m=+4766.932058073" lastFinishedPulling="2025-10-01 06:48:40.444299458 +0000 UTC m=+4769.382278072" observedRunningTime="2025-10-01 06:48:41.061990899 +0000 UTC m=+4769.999969513" watchObservedRunningTime="2025-10-01 06:48:41.067988453 +0000 UTC m=+4770.005967067"
Oct 01 06:48:41 crc kubenswrapper[4661]: I1001 06:48:41.298499 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:41 crc kubenswrapper[4661]: W1001 06:48:41.304476 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee5678e0_10d2_4507_8d14_68cb7107ff51.slice/crio-f29db3df9be96a2f1ac579563ee5d41a4b30b42170866958c0caaa522754f58b WatchSource:0}: Error finding container f29db3df9be96a2f1ac579563ee5d41a4b30b42170866958c0caaa522754f58b: Status 404 returned error can't find the container with id f29db3df9be96a2f1ac579563ee5d41a4b30b42170866958c0caaa522754f58b
Oct 01 06:48:42 crc kubenswrapper[4661]: I1001 06:48:42.050048 4661 generic.go:334] "Generic (PLEG): container finished" podID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerID="7fa6cbc54e2eed7806e0a2f62ee164438b7df13f310c58fab1c0bb8db566aec6" exitCode=0
Oct 01 06:48:42 crc kubenswrapper[4661]: I1001 06:48:42.050101 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerDied","Data":"7fa6cbc54e2eed7806e0a2f62ee164438b7df13f310c58fab1c0bb8db566aec6"}
Oct 01 06:48:42 crc kubenswrapper[4661]: I1001 06:48:42.050549 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerStarted","Data":"f29db3df9be96a2f1ac579563ee5d41a4b30b42170866958c0caaa522754f58b"}
Oct 01 06:48:43 crc kubenswrapper[4661]: I1001 06:48:43.060521 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerStarted","Data":"0fa59766f342fbd763d53a78f36789e85e3eec86370fa5649a14fbcc075662a9"}
Oct 01 06:48:45 crc kubenswrapper[4661]: I1001 06:48:45.094938 4661 generic.go:334] "Generic (PLEG): container finished" podID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerID="0fa59766f342fbd763d53a78f36789e85e3eec86370fa5649a14fbcc075662a9" exitCode=0
Oct 01 06:48:45 crc kubenswrapper[4661]: I1001 06:48:45.095005 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerDied","Data":"0fa59766f342fbd763d53a78f36789e85e3eec86370fa5649a14fbcc075662a9"}
Oct 01 06:48:46 crc kubenswrapper[4661]: I1001 06:48:46.107249 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerStarted","Data":"64457d3d3e6ef4f6d8af4f0f30438eff51ee131d319e138fa23faafa63fc5fc4"}
Oct 01 06:48:46 crc kubenswrapper[4661]: I1001 06:48:46.141085 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-72p97" podStartSLOduration=2.629497147 podStartE2EDuration="6.141060455s" podCreationTimestamp="2025-10-01 06:48:40 +0000 UTC" firstStartedPulling="2025-10-01 06:48:42.051835362 +0000 UTC m=+4770.989813976" lastFinishedPulling="2025-10-01 06:48:45.56339867 +0000 UTC m=+4774.501377284" observedRunningTime="2025-10-01 06:48:46.130115835 +0000 UTC m=+4775.068094459" watchObservedRunningTime="2025-10-01 06:48:46.141060455 +0000 UTC m=+4775.079039079"
Oct 01 06:48:46 crc kubenswrapper[4661]: I1001 06:48:46.320243 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:46 crc kubenswrapper[4661]: I1001 06:48:46.320280 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:46 crc kubenswrapper[4661]: I1001 06:48:46.371216 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:47 crc kubenswrapper[4661]: I1001 06:48:47.203075 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:48 crc kubenswrapper[4661]: I1001 06:48:48.729546 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"]
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.133968 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rv6p9" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="registry-server" containerID="cri-o://fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896" gracePeriod=2
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.671192 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.714874 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities\") pod \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") "
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.715026 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content\") pod \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") "
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.715098 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r72ch\" (UniqueName: \"kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch\") pod \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\" (UID: \"14c17dc0-a5a5-448b-a49a-fea64daa1d05\") "
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.716310 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities" (OuterVolumeSpecName: "utilities") pod "14c17dc0-a5a5-448b-a49a-fea64daa1d05" (UID: "14c17dc0-a5a5-448b-a49a-fea64daa1d05"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.722025 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch" (OuterVolumeSpecName: "kube-api-access-r72ch") pod "14c17dc0-a5a5-448b-a49a-fea64daa1d05" (UID: "14c17dc0-a5a5-448b-a49a-fea64daa1d05"). InnerVolumeSpecName "kube-api-access-r72ch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.738321 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14c17dc0-a5a5-448b-a49a-fea64daa1d05" (UID: "14c17dc0-a5a5-448b-a49a-fea64daa1d05"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.817538 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.817580 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c17dc0-a5a5-448b-a49a-fea64daa1d05-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:49 crc kubenswrapper[4661]: I1001 06:48:49.817594 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r72ch\" (UniqueName: \"kubernetes.io/projected/14c17dc0-a5a5-448b-a49a-fea64daa1d05-kube-api-access-r72ch\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.145399 4661 generic.go:334] "Generic (PLEG): container finished" podID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerID="fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896" exitCode=0
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.145441 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerDied","Data":"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"}
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.145473 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rv6p9" event={"ID":"14c17dc0-a5a5-448b-a49a-fea64daa1d05","Type":"ContainerDied","Data":"86b6101b74e55fb7fe1c8f3ed248b89d28ae95009e50217f2b8f7e7ef3e65630"}
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.145503 4661 scope.go:117] "RemoveContainer" containerID="fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.145502 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rv6p9"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.172003 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"]
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.174239 4661 scope.go:117] "RemoveContainer" containerID="bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.181033 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rv6p9"]
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.198533 4661 scope.go:117] "RemoveContainer" containerID="6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.283387 4661 scope.go:117] "RemoveContainer" containerID="fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.283833 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896\": container with ID starting with fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896 not found: ID does not exist" containerID="fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.283883 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896"} err="failed to get container status \"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896\": rpc error: code = NotFound desc = could not find container \"fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896\": container with ID starting with fbc66d8cd782161eaa00408096e6b238fb5b6052177177b8bbafecf9120ab896 not found: ID does not exist"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.283921 4661 scope.go:117] "RemoveContainer" containerID="bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8"
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.284648 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8\": container with ID starting with bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8 not found: ID does not exist" containerID="bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.284707 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8"} err="failed to get container status \"bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8\": rpc error: code = NotFound desc = could not find container \"bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8\": container with ID starting with bd11d53a2d93d06bf9521c86026b82f0be13167e1d7d43461588175843a1e3d8 not found: ID does not exist"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.284751 4661 scope.go:117] "RemoveContainer" containerID="6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3"
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.285252 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3\": container with ID starting with 6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3 not found: ID does not exist" containerID="6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.285302 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3"} err="failed to get container status \"6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3\": rpc error: code = NotFound desc = could not find container \"6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3\": container with ID starting with 6bb3c26753152816b107452a255d4f5d214e5295c2ebacccf946ad3147e0a0e3 not found: ID does not exist"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.538401 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.539518 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="extract-content"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.539547 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="extract-content"
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.539602 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="extract-utilities"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.539617 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="extract-utilities"
Oct 01 06:48:50 crc kubenswrapper[4661]: E1001 06:48:50.539762 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="registry-server"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.539778 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="registry-server"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.540173 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" containerName="registry-server"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.542967 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.553627 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.634687 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.634829 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n52ct\" (UniqueName: \"kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.634915 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.710581 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.710646 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.737525 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.737799 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n52ct\" (UniqueName: \"kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.737907 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.738244 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.738421 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.916808 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n52ct\" (UniqueName: \"kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct\") pod \"redhat-operators-qpqm8\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") " pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:50 crc kubenswrapper[4661]: I1001 06:48:50.977906 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:51 crc kubenswrapper[4661]: I1001 06:48:51.167128 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:48:51 crc kubenswrapper[4661]: I1001 06:48:51.227005 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:51 crc kubenswrapper[4661]: I1001 06:48:51.643228 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:48:51 crc kubenswrapper[4661]: I1001 06:48:51.769531 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14c17dc0-a5a5-448b-a49a-fea64daa1d05" path="/var/lib/kubelet/pods/14c17dc0-a5a5-448b-a49a-fea64daa1d05/volumes"
Oct 01 06:48:52 crc kubenswrapper[4661]: I1001 06:48:52.170522 4661 generic.go:334] "Generic (PLEG): container finished" podID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerID="cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c" exitCode=0
Oct 01 06:48:52 crc kubenswrapper[4661]: I1001 06:48:52.170615 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerDied","Data":"cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c"}
Oct 01 06:48:52 crc kubenswrapper[4661]: I1001 06:48:52.170702 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerStarted","Data":"9ff0b5891df35679d27b314438d4d78447367347d06340c8a8b5f2aa4d4f2466"}
Oct 01 06:48:54 crc kubenswrapper[4661]: I1001 06:48:54.198785 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerStarted","Data":"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"}
Oct 01 06:48:54 crc kubenswrapper[4661]: I1001 06:48:54.947098 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:54 crc kubenswrapper[4661]: I1001 06:48:54.947603 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-72p97" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="registry-server" containerID="cri-o://64457d3d3e6ef4f6d8af4f0f30438eff51ee131d319e138fa23faafa63fc5fc4" gracePeriod=2
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.212620 4661 generic.go:334] "Generic (PLEG): container finished" podID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerID="64457d3d3e6ef4f6d8af4f0f30438eff51ee131d319e138fa23faafa63fc5fc4" exitCode=0
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.213823 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerDied","Data":"64457d3d3e6ef4f6d8af4f0f30438eff51ee131d319e138fa23faafa63fc5fc4"}
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.422514 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.432409 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8mpp\" (UniqueName: \"kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp\") pod \"ee5678e0-10d2-4507-8d14-68cb7107ff51\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") "
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.432591 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content\") pod \"ee5678e0-10d2-4507-8d14-68cb7107ff51\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") "
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.432680 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities\") pod \"ee5678e0-10d2-4507-8d14-68cb7107ff51\" (UID: \"ee5678e0-10d2-4507-8d14-68cb7107ff51\") "
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.433745 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities" (OuterVolumeSpecName: "utilities") pod "ee5678e0-10d2-4507-8d14-68cb7107ff51" (UID: "ee5678e0-10d2-4507-8d14-68cb7107ff51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.441409 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp" (OuterVolumeSpecName: "kube-api-access-m8mpp") pod "ee5678e0-10d2-4507-8d14-68cb7107ff51" (UID: "ee5678e0-10d2-4507-8d14-68cb7107ff51"). InnerVolumeSpecName "kube-api-access-m8mpp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.477761 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee5678e0-10d2-4507-8d14-68cb7107ff51" (UID: "ee5678e0-10d2-4507-8d14-68cb7107ff51"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.534616 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8mpp\" (UniqueName: \"kubernetes.io/projected/ee5678e0-10d2-4507-8d14-68cb7107ff51-kube-api-access-m8mpp\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.534669 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:55 crc kubenswrapper[4661]: I1001 06:48:55.534682 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee5678e0-10d2-4507-8d14-68cb7107ff51-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.230816 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-72p97" event={"ID":"ee5678e0-10d2-4507-8d14-68cb7107ff51","Type":"ContainerDied","Data":"f29db3df9be96a2f1ac579563ee5d41a4b30b42170866958c0caaa522754f58b"}
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.231262 4661 scope.go:117] "RemoveContainer" containerID="64457d3d3e6ef4f6d8af4f0f30438eff51ee131d319e138fa23faafa63fc5fc4"
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.230880 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-72p97"
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.270347 4661 scope.go:117] "RemoveContainer" containerID="0fa59766f342fbd763d53a78f36789e85e3eec86370fa5649a14fbcc075662a9"
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.276016 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.290445 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-72p97"]
Oct 01 06:48:56 crc kubenswrapper[4661]: I1001 06:48:56.372625 4661 scope.go:117] "RemoveContainer" containerID="7fa6cbc54e2eed7806e0a2f62ee164438b7df13f310c58fab1c0bb8db566aec6"
Oct 01 06:48:57 crc kubenswrapper[4661]: I1001 06:48:57.246752 4661 generic.go:334] "Generic (PLEG): container finished" podID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerID="ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244" exitCode=0
Oct 01 06:48:57 crc kubenswrapper[4661]: I1001 06:48:57.246817 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerDied","Data":"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"}
Oct 01 06:48:57 crc kubenswrapper[4661]: I1001 06:48:57.769154 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" path="/var/lib/kubelet/pods/ee5678e0-10d2-4507-8d14-68cb7107ff51/volumes"
Oct 01 06:48:58 crc kubenswrapper[4661]: I1001 06:48:58.257737 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerStarted","Data":"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"}
Oct 01 06:49:01 crc kubenswrapper[4661]: I1001 06:49:01.167772 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:01 crc kubenswrapper[4661]: I1001 06:49:01.168346 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:02 crc kubenswrapper[4661]: I1001 06:49:02.220921 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qpqm8" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="registry-server" probeResult="failure" output=<
Oct 01 06:49:02 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s
Oct 01 06:49:02 crc kubenswrapper[4661]: >
Oct 01 06:49:04 crc kubenswrapper[4661]: I1001 06:49:04.309198 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:49:04 crc kubenswrapper[4661]: I1001 06:49:04.309739 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:49:04 crc kubenswrapper[4661]: I1001 06:49:04.309786 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 06:49:04 crc kubenswrapper[4661]: I1001 06:49:04.310540 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 06:49:04 crc kubenswrapper[4661]: I1001 06:49:04.310594 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64" gracePeriod=600
Oct 01 06:49:05 crc kubenswrapper[4661]: I1001 06:49:05.335773 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64" exitCode=0
Oct 01 06:49:05 crc kubenswrapper[4661]: I1001 06:49:05.335883 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64"}
Oct 01 06:49:05 crc kubenswrapper[4661]: I1001 06:49:05.337500 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"}
Oct 01 06:49:05 crc kubenswrapper[4661]: I1001 06:49:05.337556 4661 scope.go:117] "RemoveContainer" containerID="95aff1b64d0b2919a5df84b0bb914c7d8decdcba657583232b19efbfb2836ebc"
Oct 01 06:49:05 crc kubenswrapper[4661]: I1001 06:49:05.366426 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qpqm8" podStartSLOduration=9.8515178 podStartE2EDuration="15.366404201s" podCreationTimestamp="2025-10-01 06:48:50 +0000 UTC" firstStartedPulling="2025-10-01 06:48:52.173109285 +0000 UTC m=+4781.111087939" lastFinishedPulling="2025-10-01 06:48:57.687995736 +0000 UTC m=+4786.625974340" observedRunningTime="2025-10-01 06:48:58.27657886 +0000 UTC m=+4787.214557474" watchObservedRunningTime="2025-10-01 06:49:05.366404201 +0000 UTC m=+4794.304382826"
Oct 01 06:49:11 crc kubenswrapper[4661]: I1001 06:49:11.745516 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:11 crc kubenswrapper[4661]: I1001 06:49:11.850175 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:12 crc kubenswrapper[4661]: I1001 06:49:12.008287 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:49:13 crc kubenswrapper[4661]: I1001 06:49:13.439248 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qpqm8" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="registry-server" containerID="cri-o://c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37" gracePeriod=2
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.367058 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.450171 4661 generic.go:334] "Generic (PLEG): container finished" podID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerID="c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37" exitCode=0
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.450218 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerDied","Data":"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"}
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.450241 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qpqm8"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.450261 4661 scope.go:117] "RemoveContainer" containerID="c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.450249 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qpqm8" event={"ID":"39647bcb-e877-49b9-a960-6d5689c2d94d","Type":"ContainerDied","Data":"9ff0b5891df35679d27b314438d4d78447367347d06340c8a8b5f2aa4d4f2466"}
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.471426 4661 scope.go:117] "RemoveContainer" containerID="ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.504658 4661 scope.go:117] "RemoveContainer" containerID="cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.547199 4661 scope.go:117] "RemoveContainer" containerID="c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"
Oct 01 06:49:14 crc kubenswrapper[4661]: E1001 06:49:14.547677 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37\": container with ID starting with c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37 not found: ID does not exist" containerID="c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.547774 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37"} err="failed to get container status \"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37\": rpc error: code = NotFound desc = could not find container \"c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37\": container with ID starting with c6a7905f0794e153845ddb031b86f6be6f104440220df41729a33dabe8643c37 not found: ID does not exist"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.547869 4661 scope.go:117] "RemoveContainer" containerID="ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"
Oct 01 06:49:14 crc kubenswrapper[4661]: E1001 06:49:14.548427 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244\": container with ID starting with ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244 not found: ID does not exist" containerID="ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.548452 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244"} err="failed to get container status \"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244\": rpc error: code = NotFound desc = could not find container \"ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244\": container with ID starting with ff131949a0a37e39461a62dc2d8a005843ad2a4f2286a1727c0bdf1060c68244 not found: ID does not exist"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.548468 4661 scope.go:117] "RemoveContainer" containerID="cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c"
Oct 01 06:49:14 crc kubenswrapper[4661]: E1001 06:49:14.548809 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c\": container with ID starting with cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c not found: ID does not exist" containerID="cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.548908 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c"} err="failed to get container status \"cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c\": rpc error: code = NotFound desc = could not find container \"cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c\": container with ID starting with cf0c9cd36269eeb92de9cf43bc33d39b9df20e522fa4cb0f4255b12fd751183c not found: ID does not exist"
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.568114 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities\") pod \"39647bcb-e877-49b9-a960-6d5689c2d94d\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") "
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.568216 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n52ct\" (UniqueName: \"kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct\") pod \"39647bcb-e877-49b9-a960-6d5689c2d94d\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") "
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.568290 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content\") pod \"39647bcb-e877-49b9-a960-6d5689c2d94d\" (UID: \"39647bcb-e877-49b9-a960-6d5689c2d94d\") "
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.569073 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities" (OuterVolumeSpecName: "utilities") pod "39647bcb-e877-49b9-a960-6d5689c2d94d" (UID: "39647bcb-e877-49b9-a960-6d5689c2d94d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.583158 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct" (OuterVolumeSpecName: "kube-api-access-n52ct") pod "39647bcb-e877-49b9-a960-6d5689c2d94d" (UID: "39647bcb-e877-49b9-a960-6d5689c2d94d"). InnerVolumeSpecName "kube-api-access-n52ct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.656489 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39647bcb-e877-49b9-a960-6d5689c2d94d" (UID: "39647bcb-e877-49b9-a960-6d5689c2d94d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.671249 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.671299 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n52ct\" (UniqueName: \"kubernetes.io/projected/39647bcb-e877-49b9-a960-6d5689c2d94d-kube-api-access-n52ct\") on node \"crc\" DevicePath \"\""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.671312 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39647bcb-e877-49b9-a960-6d5689c2d94d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.787133 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:49:14 crc kubenswrapper[4661]: I1001 06:49:14.794026 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qpqm8"]
Oct 01 06:49:15 crc kubenswrapper[4661]: I1001 06:49:15.773482 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" path="/var/lib/kubelet/pods/39647bcb-e877-49b9-a960-6d5689c2d94d/volumes"
Oct 01 06:51:04 crc kubenswrapper[4661]: I1001 06:51:04.309288 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:51:04 crc kubenswrapper[4661]: I1001 06:51:04.310050 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:51:34 crc kubenswrapper[4661]: I1001 06:51:34.309514 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:51:34 crc kubenswrapper[4661]: I1001 06:51:34.310502 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.309857 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.310502 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.310565 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.311229 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.311303 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" gracePeriod=600
Oct 01 06:52:04 crc kubenswrapper[4661]: E1001 06:52:04.440538 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.587505 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" exitCode=0
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.587622 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"}
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.587730 4661 scope.go:117] "RemoveContainer" containerID="6a337ad704a6224229051de7892e1fff198cf1bc578e837f53a93425bd4f9d64"
Oct 01 06:52:04 crc kubenswrapper[4661]: I1001 06:52:04.589242 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:52:04 crc kubenswrapper[4661]: E1001 06:52:04.590020 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:52:17 crc kubenswrapper[4661]: I1001 06:52:17.757234 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:52:17 crc kubenswrapper[4661]: E1001 06:52:17.758378 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:52:30 crc kubenswrapper[4661]: I1001 06:52:30.758311 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:52:30 crc kubenswrapper[4661]: E1001 06:52:30.759427 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:52:44 crc kubenswrapper[4661]: I1001 06:52:44.756943 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:52:44 crc kubenswrapper[4661]: E1001 06:52:44.759373 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:52:57 crc kubenswrapper[4661]: I1001 06:52:57.757989 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:52:57 crc kubenswrapper[4661]: E1001 06:52:57.758893 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:53:12 crc kubenswrapper[4661]: I1001 06:53:12.757082 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:53:12 crc kubenswrapper[4661]: E1001 06:53:12.757994 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:53:24 crc kubenswrapper[4661]: I1001 06:53:24.756912 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:53:24 crc kubenswrapper[4661]: E1001 06:53:24.757950 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:53:35 crc kubenswrapper[4661]: I1001 06:53:35.757253 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:53:35 crc kubenswrapper[4661]: E1001 06:53:35.758425 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:53:47 crc kubenswrapper[4661]: I1001 06:53:47.756792 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:53:47 crc kubenswrapper[4661]: E1001 06:53:47.757871 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:54:00 crc kubenswrapper[4661]: I1001 06:54:00.757896 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:54:00 crc kubenswrapper[4661]: E1001 06:54:00.758971 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:54:15 crc kubenswrapper[4661]: I1001 06:54:15.757980 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:54:15 crc kubenswrapper[4661]: E1001 06:54:15.758823 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:54:28 crc kubenswrapper[4661]: I1001 06:54:28.757186 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:54:28 crc kubenswrapper[4661]: E1001 06:54:28.758278 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:54:41 crc kubenswrapper[4661]: I1001 06:54:41.768666 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:54:41 crc kubenswrapper[4661]: E1001 06:54:41.769723 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:54:56 crc kubenswrapper[4661]: I1001 06:54:56.757512 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:54:56 crc kubenswrapper[4661]: E1001 06:54:56.758754 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:55:10 crc kubenswrapper[4661]: I1001 06:55:10.757408 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:55:10 crc kubenswrapper[4661]: E1001 06:55:10.758845 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:55:22 crc kubenswrapper[4661]: I1001 06:55:22.757849 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:55:22 crc kubenswrapper[4661]: E1001 06:55:22.759224 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:55:37 crc kubenswrapper[4661]: I1001 06:55:37.757396 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:55:37 crc kubenswrapper[4661]: E1001 06:55:37.758741 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 06:55:49 crc kubenswrapper[4661]: I1001 06:55:49.757908 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 06:55:49 crc kubenswrapper[4661]: E1001 06:55:49.758909 4661
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:56:01 crc kubenswrapper[4661]: I1001 06:56:01.770267 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:56:01 crc kubenswrapper[4661]: E1001 06:56:01.771445 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:56:16 crc kubenswrapper[4661]: I1001 06:56:16.757524 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:56:16 crc kubenswrapper[4661]: E1001 06:56:16.758829 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:56:31 crc kubenswrapper[4661]: I1001 06:56:31.773422 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:56:31 crc kubenswrapper[4661]: E1001 06:56:31.774606 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:56:43 crc kubenswrapper[4661]: I1001 06:56:43.758196 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:56:43 crc kubenswrapper[4661]: E1001 06:56:43.759176 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 06:56:54 crc kubenswrapper[4661]: I1001 06:56:54.757032 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:56:54 crc kubenswrapper[4661]: E1001 06:56:54.758026 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
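The "RemoveContainer" / "Error syncing pod" pairs above repeat roughly every 10-15 seconds: that is the kubelet re-attempting the pod sync, with each attempt refused because the container is still inside its CrashLoopBackOff window ("back-off 5m0s"). The container killed after the liveness-probe failure at 06:52:04 is only recreated at 06:57:05, once the five-minute window has elapsed. A minimal sketch of the capped-doubling back-off schedule; the 10 s base and doubling factor are kubelet's documented defaults and are assumed here, since this log only shows the 5m0s cap:

    import itertools

    def crashloop_delays(base=10.0, cap=300.0):
        """Yield successive CrashLoopBackOff delays: double each time, capped."""
        delay = base
        while True:
            yield min(delay, cap)
            delay *= 2

    print(list(itertools.islice(crashloop_delays(), 7)))
    # -> [10.0, 20.0, 40.0, 80.0, 160.0, 300.0, 300.0]  (300 s == the "5m0s" above)

Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.977316 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978154 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="extract-content" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978171 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="extract-content" Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978196 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="extract-utilities" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978206 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="extract-utilities" Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978224 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978234 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978252 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="extract-content" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978259 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="extract-content" Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978276 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="extract-utilities" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978283 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="extract-utilities" Oct 01 06:56:55 crc kubenswrapper[4661]: E1001 06:56:55.978301 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978308 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978546 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee5678e0-10d2-4507-8d14-68cb7107ff51" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.978589 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="39647bcb-e877-49b9-a960-6d5689c2d94d" containerName="registry-server" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.980117 4661 util.go:30] "No sandbox for pod can be found. 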
Need to start a new one" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:55 crc kubenswrapper[4661]: I1001 06:56:55.998316 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.169944 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.170152 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.170734 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wms8v\" (UniqueName: \"kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.274703 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wms8v\" (UniqueName: \"kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.274790 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.274875 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.275561 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.277728 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.299308 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wms8v\" (UniqueName: \"kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v\") pod \"community-operators-88fqn\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.313934 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:56:56 crc kubenswrapper[4661]: I1001 06:56:56.890149 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:56:58 crc kubenswrapper[4661]: I1001 06:56:58.147450 4661 generic.go:334] "Generic (PLEG): container finished" podID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerID="884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e" exitCode=0 Oct 01 06:56:58 crc kubenswrapper[4661]: I1001 06:56:58.147625 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerDied","Data":"884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e"} Oct 01 06:56:58 crc kubenswrapper[4661]: I1001 06:56:58.148359 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerStarted","Data":"82095ef6186dcab621deb8bcd380adb5cd5eb09f87bdd09f021653ff082aea8e"} Oct 01 06:56:58 crc kubenswrapper[4661]: I1001 06:56:58.150755 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:57:00 crc kubenswrapper[4661]: I1001 06:57:00.193352 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerStarted","Data":"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839"} Oct 01 06:57:01 crc kubenswrapper[4661]: I1001 06:57:01.209797 4661 generic.go:334] "Generic (PLEG): container finished" podID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerID="e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839" exitCode=0 Oct 01 06:57:01 crc kubenswrapper[4661]: I1001 06:57:01.209853 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerDied","Data":"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839"} Oct 01 06:57:02 crc kubenswrapper[4661]: I1001 06:57:02.226409 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerStarted","Data":"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68"} Oct 01 06:57:02 crc kubenswrapper[4661]: I1001 06:57:02.262711 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-88fqn" podStartSLOduration=3.68678023 podStartE2EDuration="7.262684198s" podCreationTimestamp="2025-10-01 06:56:55 +0000 UTC" firstStartedPulling="2025-10-01 06:56:58.150297171 +0000 UTC m=+5267.088275825" lastFinishedPulling="2025-10-01 06:57:01.726201139 +0000 UTC m=+5270.664179793" observedRunningTime="2025-10-01 06:57:02.251299767 +0000 UTC m=+5271.189278411" watchObservedRunningTime="2025-10-01 
Oct 01 06:57:05 crc kubenswrapper[4661]: I1001 06:57:05.758333 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01" Oct 01 06:57:06 crc kubenswrapper[4661]: I1001 06:57:06.292069 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724"} Oct 01 06:57:06 crc kubenswrapper[4661]: I1001 06:57:06.314889 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:06 crc kubenswrapper[4661]: I1001 06:57:06.314956 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:06 crc kubenswrapper[4661]: I1001 06:57:06.396182 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:07 crc kubenswrapper[4661]: I1001 06:57:07.399468 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:07 crc kubenswrapper[4661]: I1001 06:57:07.460905 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:57:09 crc kubenswrapper[4661]: I1001 06:57:09.332316 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-88fqn" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="registry-server" containerID="cri-o://3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68" gracePeriod=2 Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.358080 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.360616 4661 generic.go:334] "Generic (PLEG): container finished" podID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerID="3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68" exitCode=0 Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.360755 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerDied","Data":"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68"} Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.360814 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88fqn" event={"ID":"723194f6-7bc6-410c-8cb1-34099f8f31fc","Type":"ContainerDied","Data":"82095ef6186dcab621deb8bcd380adb5cd5eb09f87bdd09f021653ff082aea8e"} Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.360847 4661 scope.go:117] "RemoveContainer" containerID="3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.391425 4661 scope.go:117] "RemoveContainer" containerID="e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.432091 4661 scope.go:117] "RemoveContainer" containerID="884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.459651 4661 scope.go:117] "RemoveContainer" containerID="3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68" Oct 01 06:57:10 crc kubenswrapper[4661]: E1001 06:57:10.460067 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68\": container with ID starting with 3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68 not found: ID does not exist" containerID="3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.460116 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68"} err="failed to get container status \"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68\": rpc error: code = NotFound desc = could not find container \"3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68\": container with ID starting with 3d515d1a6537784e95b083aac059bfadee28f34408040d4da934d2d650171d68 not found: ID does not exist" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.460148 4661 scope.go:117] "RemoveContainer" containerID="e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839" Oct 01 06:57:10 crc kubenswrapper[4661]: E1001 06:57:10.460438 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839\": container with ID starting with e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839 not found: ID does not exist" containerID="e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.460472 4661 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839"} err="failed to get container status \"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839\": rpc error: code = NotFound desc = could not find container \"e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839\": container with ID starting with e57f7b20f474e72d9c91f29dda90485aa5f7f76e29b95661b1c39e6085288839 not found: ID does not exist" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.460488 4661 scope.go:117] "RemoveContainer" containerID="884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e" Oct 01 06:57:10 crc kubenswrapper[4661]: E1001 06:57:10.461178 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e\": container with ID starting with 884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e not found: ID does not exist" containerID="884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.461222 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e"} err="failed to get container status \"884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e\": rpc error: code = NotFound desc = could not find container \"884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e\": container with ID starting with 884728d1473c905743ea2642327dea3093d34c342c80593b58526c7efadc1b9e not found: ID does not exist" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.545069 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content\") pod \"723194f6-7bc6-410c-8cb1-34099f8f31fc\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.545310 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wms8v\" (UniqueName: \"kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v\") pod \"723194f6-7bc6-410c-8cb1-34099f8f31fc\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.545696 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities\") pod \"723194f6-7bc6-410c-8cb1-34099f8f31fc\" (UID: \"723194f6-7bc6-410c-8cb1-34099f8f31fc\") " Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.548336 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities" (OuterVolumeSpecName: "utilities") pod "723194f6-7bc6-410c-8cb1-34099f8f31fc" (UID: "723194f6-7bc6-410c-8cb1-34099f8f31fc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.554248 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v" (OuterVolumeSpecName: "kube-api-access-wms8v") pod "723194f6-7bc6-410c-8cb1-34099f8f31fc" (UID: "723194f6-7bc6-410c-8cb1-34099f8f31fc"). InnerVolumeSpecName "kube-api-access-wms8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.622300 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "723194f6-7bc6-410c-8cb1-34099f8f31fc" (UID: "723194f6-7bc6-410c-8cb1-34099f8f31fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.648220 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.648273 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/723194f6-7bc6-410c-8cb1-34099f8f31fc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:57:10 crc kubenswrapper[4661]: I1001 06:57:10.648296 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wms8v\" (UniqueName: \"kubernetes.io/projected/723194f6-7bc6-410c-8cb1-34099f8f31fc-kube-api-access-wms8v\") on node \"crc\" DevicePath \"\"" Oct 01 06:57:11 crc kubenswrapper[4661]: I1001 06:57:11.377464 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-88fqn" Oct 01 06:57:11 crc kubenswrapper[4661]: I1001 06:57:11.424438 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:57:11 crc kubenswrapper[4661]: I1001 06:57:11.434375 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-88fqn"] Oct 01 06:57:11 crc kubenswrapper[4661]: I1001 06:57:11.778943 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" path="/var/lib/kubelet/pods/723194f6-7bc6-410c-8cb1-34099f8f31fc/volumes" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.056494 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:22 crc kubenswrapper[4661]: E1001 06:59:22.062275 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="extract-content" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.062304 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="extract-content" Oct 01 06:59:22 crc kubenswrapper[4661]: E1001 06:59:22.062345 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="extract-utilities" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.062353 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="extract-utilities" Oct 01 06:59:22 crc kubenswrapper[4661]: E1001 06:59:22.062379 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="registry-server" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.062385 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="registry-server" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.062645 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="723194f6-7bc6-410c-8cb1-34099f8f31fc" containerName="registry-server" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.064073 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.067403 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.204021 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.204060 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.204089 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvd7w\" (UniqueName: \"kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.306495 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.306535 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.306566 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvd7w\" (UniqueName: \"kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.307065 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.307183 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.329751 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cvd7w\" (UniqueName: \"kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w\") pod \"certified-operators-zdxnv\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.387859 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:22 crc kubenswrapper[4661]: I1001 06:59:22.962050 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:23 crc kubenswrapper[4661]: I1001 06:59:23.966226 4661 generic.go:334] "Generic (PLEG): container finished" podID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerID="d532e43626aa84f76de7a1241167f8682e4995d75051f4c832c0c597281f45e9" exitCode=0 Oct 01 06:59:23 crc kubenswrapper[4661]: I1001 06:59:23.966288 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerDied","Data":"d532e43626aa84f76de7a1241167f8682e4995d75051f4c832c0c597281f45e9"} Oct 01 06:59:23 crc kubenswrapper[4661]: I1001 06:59:23.966615 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerStarted","Data":"116ce8bf2375ff2462163008f0a694a5063de574ca27912b7b9e2d648e09f9f1"} Oct 01 06:59:24 crc kubenswrapper[4661]: I1001 06:59:24.980909 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerStarted","Data":"38e6d050bf8a5784077773026d636c525d02f4190300745da2be6912d50a4e6e"} Oct 01 06:59:25 crc kubenswrapper[4661]: I1001 06:59:25.997870 4661 generic.go:334] "Generic (PLEG): container finished" podID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerID="38e6d050bf8a5784077773026d636c525d02f4190300745da2be6912d50a4e6e" exitCode=0 Oct 01 06:59:25 crc kubenswrapper[4661]: I1001 06:59:25.998008 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerDied","Data":"38e6d050bf8a5784077773026d636c525d02f4190300745da2be6912d50a4e6e"} Oct 01 06:59:27 crc kubenswrapper[4661]: I1001 06:59:27.008883 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerStarted","Data":"3aa3d8cfbc9b231a342a772ff926b3faa4d3b5b38122fa3aae86515de32a886d"} Oct 01 06:59:27 crc kubenswrapper[4661]: I1001 06:59:27.031830 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zdxnv" podStartSLOduration=2.601298864 podStartE2EDuration="5.031808984s" podCreationTimestamp="2025-10-01 06:59:22 +0000 UTC" firstStartedPulling="2025-10-01 06:59:23.968926993 +0000 UTC m=+5412.906905617" lastFinishedPulling="2025-10-01 06:59:26.399437103 +0000 UTC m=+5415.337415737" observedRunningTime="2025-10-01 06:59:27.026421467 +0000 UTC m=+5415.964400081" watchObservedRunningTime="2025-10-01 06:59:27.031808984 +0000 UTC m=+5415.969787598" Oct 01 06:59:32 crc kubenswrapper[4661]: I1001 06:59:32.388486 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:32 crc kubenswrapper[4661]: I1001 06:59:32.389184 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:32 crc kubenswrapper[4661]: I1001 06:59:32.481176 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:33 crc kubenswrapper[4661]: I1001 06:59:33.176621 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:33 crc kubenswrapper[4661]: I1001 06:59:33.239829 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:34 crc kubenswrapper[4661]: I1001 06:59:34.309687 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:59:34 crc kubenswrapper[4661]: I1001 06:59:34.310690 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:59:35 crc kubenswrapper[4661]: I1001 06:59:35.111187 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zdxnv" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="registry-server" containerID="cri-o://3aa3d8cfbc9b231a342a772ff926b3faa4d3b5b38122fa3aae86515de32a886d" gracePeriod=2 Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.123285 4661 generic.go:334] "Generic (PLEG): container finished" podID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerID="3aa3d8cfbc9b231a342a772ff926b3faa4d3b5b38122fa3aae86515de32a886d" exitCode=0 Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.123580 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerDied","Data":"3aa3d8cfbc9b231a342a772ff926b3faa4d3b5b38122fa3aae86515de32a886d"} Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.123603 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdxnv" event={"ID":"77871912-431d-4b3c-b6ff-d357a5cf2c10","Type":"ContainerDied","Data":"116ce8bf2375ff2462163008f0a694a5063de574ca27912b7b9e2d648e09f9f1"} Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.123615 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="116ce8bf2375ff2462163008f0a694a5063de574ca27912b7b9e2d648e09f9f1" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.179350 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.218196 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content\") pod \"77871912-431d-4b3c-b6ff-d357a5cf2c10\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.218276 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvd7w\" (UniqueName: \"kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w\") pod \"77871912-431d-4b3c-b6ff-d357a5cf2c10\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.218418 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities\") pod \"77871912-431d-4b3c-b6ff-d357a5cf2c10\" (UID: \"77871912-431d-4b3c-b6ff-d357a5cf2c10\") " Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.219235 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities" (OuterVolumeSpecName: "utilities") pod "77871912-431d-4b3c-b6ff-d357a5cf2c10" (UID: "77871912-431d-4b3c-b6ff-d357a5cf2c10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.234219 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w" (OuterVolumeSpecName: "kube-api-access-cvd7w") pod "77871912-431d-4b3c-b6ff-d357a5cf2c10" (UID: "77871912-431d-4b3c-b6ff-d357a5cf2c10"). InnerVolumeSpecName "kube-api-access-cvd7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.281394 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77871912-431d-4b3c-b6ff-d357a5cf2c10" (UID: "77871912-431d-4b3c-b6ff-d357a5cf2c10"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.320500 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.320541 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvd7w\" (UniqueName: \"kubernetes.io/projected/77871912-431d-4b3c-b6ff-d357a5cf2c10-kube-api-access-cvd7w\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.320555 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77871912-431d-4b3c-b6ff-d357a5cf2c10-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.439412 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:36 crc kubenswrapper[4661]: E1001 06:59:36.445172 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="registry-server" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.445221 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="registry-server" Oct 01 06:59:36 crc kubenswrapper[4661]: E1001 06:59:36.445297 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="extract-utilities" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.445312 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="extract-utilities" Oct 01 06:59:36 crc kubenswrapper[4661]: E1001 06:59:36.445331 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="extract-content" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.445342 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="extract-content" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.445782 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" containerName="registry-server" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.447833 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.477621 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.525352 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.525703 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.525929 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmbg\" (UniqueName: \"kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.627798 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.627884 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.627932 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmbg\" (UniqueName: \"kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.628264 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.628400 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.648335 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ksmbg\" (UniqueName: \"kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg\") pod \"redhat-marketplace-cn94x\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:36 crc kubenswrapper[4661]: I1001 06:59:36.780676 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:37 crc kubenswrapper[4661]: I1001 06:59:37.133821 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zdxnv" Oct 01 06:59:37 crc kubenswrapper[4661]: I1001 06:59:37.176041 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:37 crc kubenswrapper[4661]: I1001 06:59:37.189664 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zdxnv"] Oct 01 06:59:37 crc kubenswrapper[4661]: I1001 06:59:37.236100 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:37 crc kubenswrapper[4661]: W1001 06:59:37.245264 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod180e89f5_bf97_4bef_be9a_cd2d44271a60.slice/crio-15a8767c694d8c031bd97ce6df03dfbbc037b9f214d03a94095532d8b4a73aca WatchSource:0}: Error finding container 15a8767c694d8c031bd97ce6df03dfbbc037b9f214d03a94095532d8b4a73aca: Status 404 returned error can't find the container with id 15a8767c694d8c031bd97ce6df03dfbbc037b9f214d03a94095532d8b4a73aca Oct 01 06:59:37 crc kubenswrapper[4661]: I1001 06:59:37.773242 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77871912-431d-4b3c-b6ff-d357a5cf2c10" path="/var/lib/kubelet/pods/77871912-431d-4b3c-b6ff-d357a5cf2c10/volumes" Oct 01 06:59:38 crc kubenswrapper[4661]: I1001 06:59:38.167699 4661 generic.go:334] "Generic (PLEG): container finished" podID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerID="64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5" exitCode=0 Oct 01 06:59:38 crc kubenswrapper[4661]: I1001 06:59:38.167752 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerDied","Data":"64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5"} Oct 01 06:59:38 crc kubenswrapper[4661]: I1001 06:59:38.167784 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerStarted","Data":"15a8767c694d8c031bd97ce6df03dfbbc037b9f214d03a94095532d8b4a73aca"} Oct 01 06:59:39 crc kubenswrapper[4661]: I1001 06:59:39.181535 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerStarted","Data":"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6"} Oct 01 06:59:40 crc kubenswrapper[4661]: I1001 06:59:40.196496 4661 generic.go:334] "Generic (PLEG): container finished" podID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerID="1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6" exitCode=0 Oct 01 06:59:40 crc kubenswrapper[4661]: I1001 06:59:40.196592 4661 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerDied","Data":"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6"} Oct 01 06:59:41 crc kubenswrapper[4661]: I1001 06:59:41.206413 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerStarted","Data":"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d"} Oct 01 06:59:41 crc kubenswrapper[4661]: I1001 06:59:41.233162 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cn94x" podStartSLOduration=2.813588972 podStartE2EDuration="5.233145551s" podCreationTimestamp="2025-10-01 06:59:36 +0000 UTC" firstStartedPulling="2025-10-01 06:59:38.170748273 +0000 UTC m=+5427.108726887" lastFinishedPulling="2025-10-01 06:59:40.590304812 +0000 UTC m=+5429.528283466" observedRunningTime="2025-10-01 06:59:41.227092436 +0000 UTC m=+5430.165071040" watchObservedRunningTime="2025-10-01 06:59:41.233145551 +0000 UTC m=+5430.171124165" Oct 01 06:59:46 crc kubenswrapper[4661]: I1001 06:59:46.781801 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:46 crc kubenswrapper[4661]: I1001 06:59:46.782515 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:46 crc kubenswrapper[4661]: I1001 06:59:46.893703 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:47 crc kubenswrapper[4661]: I1001 06:59:47.375040 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:47 crc kubenswrapper[4661]: I1001 06:59:47.463469 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:49 crc kubenswrapper[4661]: I1001 06:59:49.299384 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cn94x" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="registry-server" containerID="cri-o://9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d" gracePeriod=2 Oct 01 06:59:49 crc kubenswrapper[4661]: I1001 06:59:49.835140 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.026413 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities\") pod \"180e89f5-bf97-4bef-be9a-cd2d44271a60\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.026512 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content\") pod \"180e89f5-bf97-4bef-be9a-cd2d44271a60\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.026592 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksmbg\" (UniqueName: \"kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg\") pod \"180e89f5-bf97-4bef-be9a-cd2d44271a60\" (UID: \"180e89f5-bf97-4bef-be9a-cd2d44271a60\") " Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.028056 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities" (OuterVolumeSpecName: "utilities") pod "180e89f5-bf97-4bef-be9a-cd2d44271a60" (UID: "180e89f5-bf97-4bef-be9a-cd2d44271a60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.035963 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg" (OuterVolumeSpecName: "kube-api-access-ksmbg") pod "180e89f5-bf97-4bef-be9a-cd2d44271a60" (UID: "180e89f5-bf97-4bef-be9a-cd2d44271a60"). InnerVolumeSpecName "kube-api-access-ksmbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.052417 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "180e89f5-bf97-4bef-be9a-cd2d44271a60" (UID: "180e89f5-bf97-4bef-be9a-cd2d44271a60"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.130046 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.130100 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/180e89f5-bf97-4bef-be9a-cd2d44271a60-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.130122 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksmbg\" (UniqueName: \"kubernetes.io/projected/180e89f5-bf97-4bef-be9a-cd2d44271a60-kube-api-access-ksmbg\") on node \"crc\" DevicePath \"\"" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.319157 4661 generic.go:334] "Generic (PLEG): container finished" podID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerID="9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d" exitCode=0 Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.319263 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn94x" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.319280 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerDied","Data":"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d"} Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.319700 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn94x" event={"ID":"180e89f5-bf97-4bef-be9a-cd2d44271a60","Type":"ContainerDied","Data":"15a8767c694d8c031bd97ce6df03dfbbc037b9f214d03a94095532d8b4a73aca"} Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.319732 4661 scope.go:117] "RemoveContainer" containerID="9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.357780 4661 scope.go:117] "RemoveContainer" containerID="1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.404840 4661 scope.go:117] "RemoveContainer" containerID="64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.408286 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.426149 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn94x"] Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.493550 4661 scope.go:117] "RemoveContainer" containerID="9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d" Oct 01 06:59:50 crc kubenswrapper[4661]: E1001 06:59:50.494182 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d\": container with ID starting with 9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d not found: ID does not exist" containerID="9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.494227 4661 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d"} err="failed to get container status \"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d\": rpc error: code = NotFound desc = could not find container \"9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d\": container with ID starting with 9740f4b3f94ec0b9ee0637d1c56979bd196cc721db9d6131d3cba85daa703d4d not found: ID does not exist" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.494256 4661 scope.go:117] "RemoveContainer" containerID="1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6" Oct 01 06:59:50 crc kubenswrapper[4661]: E1001 06:59:50.494683 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6\": container with ID starting with 1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6 not found: ID does not exist" containerID="1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.494706 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6"} err="failed to get container status \"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6\": rpc error: code = NotFound desc = could not find container \"1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6\": container with ID starting with 1152dc730452b5d69114db323e3865ce14b7afc3ef95e21a77feb354c5ac95d6 not found: ID does not exist" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.494725 4661 scope.go:117] "RemoveContainer" containerID="64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5" Oct 01 06:59:50 crc kubenswrapper[4661]: E1001 06:59:50.494969 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5\": container with ID starting with 64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5 not found: ID does not exist" containerID="64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5" Oct 01 06:59:50 crc kubenswrapper[4661]: I1001 06:59:50.494993 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5"} err="failed to get container status \"64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5\": rpc error: code = NotFound desc = could not find container \"64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5\": container with ID starting with 64a4da7bf71845c3d8b3620b970976382f7a481b5c6a4b56459cd1fc59bf19b5 not found: ID does not exist" Oct 01 06:59:51 crc kubenswrapper[4661]: I1001 06:59:51.783816 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" path="/var/lib/kubelet/pods/180e89f5-bf97-4bef-be9a-cd2d44271a60/volumes" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.171179 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5"] Oct 01 07:00:00 crc kubenswrapper[4661]: E1001 07:00:00.172189 4661 cpu_manager.go:410] "RemoveStaleState: 
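In the three RemoveContainer sequences at 06:59:50 above, the kubelet asks the runtime for the status of containers it has just removed, gets NotFound back, logs the error, and moves on: the container being gone is exactly the desired end state. A minimal Go sketch of that idempotent-cleanup pattern; fakeRuntime and removeIfPresent are illustrative stand-ins, not the CRI API:

// Sketch of the idempotent-cleanup pattern the entries above illustrate:
// when a delete races with the runtime's own removal, NotFound is
// treated as "already deleted" rather than as a failure.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("NotFound: ID does not exist")

type fakeRuntime struct{ containers map[string]bool }

func (r *fakeRuntime) remove(id string) error {
	if !r.containers[id] {
		return errNotFound
	}
	delete(r.containers, id)
	return nil
}

func removeIfPresent(r *fakeRuntime, id string) error {
	err := r.remove(id)
	if errors.Is(err, errNotFound) {
		return nil // already gone; the desired end state holds
	}
	return err
}

func main() {
	r := &fakeRuntime{containers: map[string]bool{"9740f4b3": true}}
	fmt.Println(removeIfPresent(r, "9740f4b3")) // <nil>: removed
	fmt.Println(removeIfPresent(r, "9740f4b3")) // <nil>: NotFound tolerated
}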
removing container" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="extract-content" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.172203 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="extract-content" Oct 01 07:00:00 crc kubenswrapper[4661]: E1001 07:00:00.172237 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="extract-utilities" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.172243 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="extract-utilities" Oct 01 07:00:00 crc kubenswrapper[4661]: E1001 07:00:00.172269 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="registry-server" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.172277 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="registry-server" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.172492 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="180e89f5-bf97-4bef-be9a-cd2d44271a60" containerName="registry-server" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.173225 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.176122 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.177204 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.184625 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5"] Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.253130 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.253478 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqsgm\" (UniqueName: \"kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.253839 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.355844 4661 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.355951 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqsgm\" (UniqueName: \"kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.356141 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.356867 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.364525 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.386808 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqsgm\" (UniqueName: \"kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm\") pod \"collect-profiles-29321700-wlbm5\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:00 crc kubenswrapper[4661]: I1001 07:00:00.536748 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:01 crc kubenswrapper[4661]: I1001 07:00:01.047363 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5"] Oct 01 07:00:01 crc kubenswrapper[4661]: I1001 07:00:01.462992 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" event={"ID":"6723e7d1-1ee4-477d-8fc2-273891e07bdf","Type":"ContainerStarted","Data":"86f7f5d4d583a37a83273d2d33d98ceb07ba9ea333ccccc5af748c277dc6ba71"} Oct 01 07:00:01 crc kubenswrapper[4661]: I1001 07:00:01.463233 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" event={"ID":"6723e7d1-1ee4-477d-8fc2-273891e07bdf","Type":"ContainerStarted","Data":"bd560456d9dd1b1abeedfaac31cf47359ff31bb7e8cbbe3939a926ab0cce61a0"} Oct 01 07:00:01 crc kubenswrapper[4661]: I1001 07:00:01.480847 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" podStartSLOduration=1.480828921 podStartE2EDuration="1.480828921s" podCreationTimestamp="2025-10-01 07:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 07:00:01.477320855 +0000 UTC m=+5450.415299469" watchObservedRunningTime="2025-10-01 07:00:01.480828921 +0000 UTC m=+5450.418807525" Oct 01 07:00:02 crc kubenswrapper[4661]: I1001 07:00:02.474423 4661 generic.go:334] "Generic (PLEG): container finished" podID="6723e7d1-1ee4-477d-8fc2-273891e07bdf" containerID="86f7f5d4d583a37a83273d2d33d98ceb07ba9ea333ccccc5af748c277dc6ba71" exitCode=0 Oct 01 07:00:02 crc kubenswrapper[4661]: I1001 07:00:02.474843 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" event={"ID":"6723e7d1-1ee4-477d-8fc2-273891e07bdf","Type":"ContainerDied","Data":"86f7f5d4d583a37a83273d2d33d98ceb07ba9ea333ccccc5af748c277dc6ba71"} Oct 01 07:00:03 crc kubenswrapper[4661]: I1001 07:00:03.920136 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.033547 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume\") pod \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.033866 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume\") pod \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.034033 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqsgm\" (UniqueName: \"kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm\") pod \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\" (UID: \"6723e7d1-1ee4-477d-8fc2-273891e07bdf\") " Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.035037 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume" (OuterVolumeSpecName: "config-volume") pod "6723e7d1-1ee4-477d-8fc2-273891e07bdf" (UID: "6723e7d1-1ee4-477d-8fc2-273891e07bdf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.052112 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6723e7d1-1ee4-477d-8fc2-273891e07bdf" (UID: "6723e7d1-1ee4-477d-8fc2-273891e07bdf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.052545 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm" (OuterVolumeSpecName: "kube-api-access-nqsgm") pod "6723e7d1-1ee4-477d-8fc2-273891e07bdf" (UID: "6723e7d1-1ee4-477d-8fc2-273891e07bdf"). InnerVolumeSpecName "kube-api-access-nqsgm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.137022 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6723e7d1-1ee4-477d-8fc2-273891e07bdf-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.137086 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6723e7d1-1ee4-477d-8fc2-273891e07bdf-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.137107 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqsgm\" (UniqueName: \"kubernetes.io/projected/6723e7d1-1ee4-477d-8fc2-273891e07bdf-kube-api-access-nqsgm\") on node \"crc\" DevicePath \"\"" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.308932 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.308994 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.499209 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" event={"ID":"6723e7d1-1ee4-477d-8fc2-273891e07bdf","Type":"ContainerDied","Data":"bd560456d9dd1b1abeedfaac31cf47359ff31bb7e8cbbe3939a926ab0cce61a0"} Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.499245 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321700-wlbm5" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.499253 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd560456d9dd1b1abeedfaac31cf47359ff31bb7e8cbbe3939a926ab0cce61a0" Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.583325 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5"] Oct 01 07:00:04 crc kubenswrapper[4661]: I1001 07:00:04.591430 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-xdvw5"] Oct 01 07:00:05 crc kubenswrapper[4661]: I1001 07:00:05.777621 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="768189cb-583f-4e7f-b9e3-4f883491857a" path="/var/lib/kubelet/pods/768189cb-583f-4e7f-b9e3-4f883491857a/volumes" Oct 01 07:00:24 crc kubenswrapper[4661]: I1001 07:00:24.813452 4661 scope.go:117] "RemoveContainer" containerID="38601fdc47b9d0d435af274d1aebaa2138d8733ff3d433ee91b52f8bf6489988" Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.309217 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.309970 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.310036 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.311093 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.311185 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724" gracePeriod=600 Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.866828 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724" exitCode=0 Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.866900 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724"} Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 
Oct 01 07:00:34 crc kubenswrapper[4661]: I1001 07:00:34.867236 4661 scope.go:117] "RemoveContainer" containerID="28bd2f7118f9152ac0d09799dee558eed66d81a5985b9a7f36427428b8756e01"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.167914 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29321701-zsg45"]
Oct 01 07:01:00 crc kubenswrapper[4661]: E1001 07:01:00.169524 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6723e7d1-1ee4-477d-8fc2-273891e07bdf" containerName="collect-profiles"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.169548 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6723e7d1-1ee4-477d-8fc2-273891e07bdf" containerName="collect-profiles"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.169829 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6723e7d1-1ee4-477d-8fc2-273891e07bdf" containerName="collect-profiles"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.170720 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.180493 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321701-zsg45"]
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.303142 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.303442 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.303570 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrrct\" (UniqueName: \"kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.303821 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.405499 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.405814 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.406014 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrrct\" (UniqueName: \"kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.406309 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.415283 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.416870 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.417960 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.434755 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrrct\" (UniqueName: \"kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct\") pod \"keystone-cron-29321701-zsg45\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") " pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:00 crc kubenswrapper[4661]: I1001 07:01:00.501057 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:01 crc kubenswrapper[4661]: I1001 07:01:01.066226 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321701-zsg45"]
Oct 01 07:01:01 crc kubenswrapper[4661]: I1001 07:01:01.202057 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321701-zsg45" event={"ID":"850d09d7-51e1-4b5f-a53e-5d39cc38dac2","Type":"ContainerStarted","Data":"d41d1609899a2845849e750b3a041a6e0c3cc3e3828ef88621058f319787f147"}
Oct 01 07:01:02 crc kubenswrapper[4661]: I1001 07:01:02.221684 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321701-zsg45" event={"ID":"850d09d7-51e1-4b5f-a53e-5d39cc38dac2","Type":"ContainerStarted","Data":"dc0f6762edf55759e7569ff80109bd77e4324d878c56afe97461309b94c356a5"}
Oct 01 07:01:02 crc kubenswrapper[4661]: I1001 07:01:02.249044 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29321701-zsg45" podStartSLOduration=2.249024846 podStartE2EDuration="2.249024846s" podCreationTimestamp="2025-10-01 07:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 07:01:02.242266701 +0000 UTC m=+5511.180245355" watchObservedRunningTime="2025-10-01 07:01:02.249024846 +0000 UTC m=+5511.187003450"
Oct 01 07:01:05 crc kubenswrapper[4661]: I1001 07:01:05.268860 4661 generic.go:334] "Generic (PLEG): container finished" podID="850d09d7-51e1-4b5f-a53e-5d39cc38dac2" containerID="dc0f6762edf55759e7569ff80109bd77e4324d878c56afe97461309b94c356a5" exitCode=0
Oct 01 07:01:05 crc kubenswrapper[4661]: I1001 07:01:05.268915 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321701-zsg45" event={"ID":"850d09d7-51e1-4b5f-a53e-5d39cc38dac2","Type":"ContainerDied","Data":"dc0f6762edf55759e7569ff80109bd77e4324d878c56afe97461309b94c356a5"}
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.775234 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.854799 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys\") pod \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") "
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.854840 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrrct\" (UniqueName: \"kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct\") pod \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") "
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.854957 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data\") pod \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") "
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.855076 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle\") pod \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\" (UID: \"850d09d7-51e1-4b5f-a53e-5d39cc38dac2\") "
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.861500 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct" (OuterVolumeSpecName: "kube-api-access-zrrct") pod "850d09d7-51e1-4b5f-a53e-5d39cc38dac2" (UID: "850d09d7-51e1-4b5f-a53e-5d39cc38dac2"). InnerVolumeSpecName "kube-api-access-zrrct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.875426 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "850d09d7-51e1-4b5f-a53e-5d39cc38dac2" (UID: "850d09d7-51e1-4b5f-a53e-5d39cc38dac2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.886919 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "850d09d7-51e1-4b5f-a53e-5d39cc38dac2" (UID: "850d09d7-51e1-4b5f-a53e-5d39cc38dac2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.934354 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data" (OuterVolumeSpecName: "config-data") pod "850d09d7-51e1-4b5f-a53e-5d39cc38dac2" (UID: "850d09d7-51e1-4b5f-a53e-5d39cc38dac2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.957459 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.957492 4661 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.957505 4661 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 01 07:01:06 crc kubenswrapper[4661]: I1001 07:01:06.957514 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrrct\" (UniqueName: \"kubernetes.io/projected/850d09d7-51e1-4b5f-a53e-5d39cc38dac2-kube-api-access-zrrct\") on node \"crc\" DevicePath \"\""
Oct 01 07:01:07 crc kubenswrapper[4661]: I1001 07:01:07.298699 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321701-zsg45" event={"ID":"850d09d7-51e1-4b5f-a53e-5d39cc38dac2","Type":"ContainerDied","Data":"d41d1609899a2845849e750b3a041a6e0c3cc3e3828ef88621058f319787f147"}
Oct 01 07:01:07 crc kubenswrapper[4661]: I1001 07:01:07.298797 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d41d1609899a2845849e750b3a041a6e0c3cc3e3828ef88621058f319787f147"
Oct 01 07:01:07 crc kubenswrapper[4661]: I1001 07:01:07.298916 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321701-zsg45"
Oct 01 07:02:34 crc kubenswrapper[4661]: I1001 07:02:34.309787 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 07:02:34 crc kubenswrapper[4661]: I1001 07:02:34.310583 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 07:03:04 crc kubenswrapper[4661]: I1001 07:03:04.309022 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 07:03:04 crc kubenswrapper[4661]: I1001 07:03:04.309867 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 07:03:34 crc kubenswrapper[4661]: I1001 07:03:34.309860 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 07:03:34 crc kubenswrapper[4661]: I1001 07:03:34.310524 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 07:03:34 crc kubenswrapper[4661]: I1001 07:03:34.310593 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 07:03:34 crc kubenswrapper[4661]: I1001 07:03:34.311732 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 07:03:34 crc kubenswrapper[4661]: I1001 07:03:34.311829 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" gracePeriod=600
Oct 01 07:03:34 crc kubenswrapper[4661]: E1001 07:03:34.436741 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:03:35 crc kubenswrapper[4661]: I1001 07:03:35.131238 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" exitCode=0
Oct 01 07:03:35 crc kubenswrapper[4661]: I1001 07:03:35.131562 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"}
Oct 01 07:03:35 crc kubenswrapper[4661]: I1001 07:03:35.131595 4661 scope.go:117] "RemoveContainer" containerID="ddf9c0cd86e3b7f47956b2704aec5adc69d4333bced181592428d3a7e762e724"
Oct 01 07:03:35 crc kubenswrapper[4661]: I1001 07:03:35.131994 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"
Oct 01 07:03:35 crc kubenswrapper[4661]: E1001 07:03:35.132230 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:03:49 crc kubenswrapper[4661]: I1001 07:03:49.758082 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"
Oct 01 07:03:49 crc kubenswrapper[4661]: E1001 07:03:49.759313 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:04:04 crc kubenswrapper[4661]: I1001 07:04:04.756747 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"
Oct 01 07:04:04 crc kubenswrapper[4661]: E1001 07:04:04.757546 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:04:16 crc kubenswrapper[4661]: I1001 07:04:16.757625 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"
Oct 01 07:04:16 crc kubenswrapper[4661]: E1001 07:04:16.758730 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:04:28 crc kubenswrapper[4661]: I1001 07:04:28.758218 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743"
Oct 01 07:04:28 crc kubenswrapper[4661]: E1001 07:04:28.759027 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
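The alternating RemoveContainer and "Error syncing pod" entries above repeat every 12 to 15 seconds while the restart back-off is in force; "back-off 5m0s" is the cap being applied. The kubelet's container restart back-off is commonly documented as a 10 s initial delay that doubles per restart up to a 5 m cap, which the following Go sketch reproduces (the 10 s base and doubling are assumed from those documented defaults; this log only shows the cap in effect):

// Editor's sketch of the doubling restart back-off behind the
// "back-off 5m0s" messages above.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 10 * time.Second
	const maxDelay = 5 * time.Minute
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// Waits: 10s 20s 40s 1m20s 2m40s 5m0s 5m0s; once the cap is
	// reached, every retry logs the CrashLoopBackOff error above.
}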
Need to start a new one" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.262610 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8rdm7"] Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.434746 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnhh7\" (UniqueName: \"kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.434869 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.434895 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.537015 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.537069 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.537160 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnhh7\" (UniqueName: \"kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.537554 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.537740 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.556620 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wnhh7\" (UniqueName: \"kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7\") pod \"redhat-operators-8rdm7\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:40 crc kubenswrapper[4661]: I1001 07:04:40.589244 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:41 crc kubenswrapper[4661]: I1001 07:04:41.057530 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8rdm7"] Oct 01 07:04:41 crc kubenswrapper[4661]: W1001 07:04:41.722908 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6347dd3e_2545_4894_9146_45336d89695a.slice/crio-c82ae278ac88946f1802adfc9e9bc076b762a37acba65f0c9c2d2968c5cfe252 WatchSource:0}: Error finding container c82ae278ac88946f1802adfc9e9bc076b762a37acba65f0c9c2d2968c5cfe252: Status 404 returned error can't find the container with id c82ae278ac88946f1802adfc9e9bc076b762a37acba65f0c9c2d2968c5cfe252 Oct 01 07:04:41 crc kubenswrapper[4661]: I1001 07:04:41.883733 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerStarted","Data":"c82ae278ac88946f1802adfc9e9bc076b762a37acba65f0c9c2d2968c5cfe252"} Oct 01 07:04:42 crc kubenswrapper[4661]: I1001 07:04:42.900648 4661 generic.go:334] "Generic (PLEG): container finished" podID="6347dd3e-2545-4894-9146-45336d89695a" containerID="ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0" exitCode=0 Oct 01 07:04:42 crc kubenswrapper[4661]: I1001 07:04:42.900751 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerDied","Data":"ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0"} Oct 01 07:04:42 crc kubenswrapper[4661]: I1001 07:04:42.904750 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 07:04:43 crc kubenswrapper[4661]: I1001 07:04:43.757576 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:04:43 crc kubenswrapper[4661]: E1001 07:04:43.758200 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:04:44 crc kubenswrapper[4661]: I1001 07:04:44.943625 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerStarted","Data":"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6"} Oct 01 07:04:45 crc kubenswrapper[4661]: I1001 07:04:45.962116 4661 generic.go:334] "Generic (PLEG): container finished" podID="6347dd3e-2545-4894-9146-45336d89695a" containerID="bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6" exitCode=0 Oct 01 07:04:45 crc kubenswrapper[4661]: I1001 07:04:45.962180 4661 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerDied","Data":"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6"} Oct 01 07:04:46 crc kubenswrapper[4661]: I1001 07:04:46.977824 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerStarted","Data":"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b"} Oct 01 07:04:47 crc kubenswrapper[4661]: I1001 07:04:47.019263 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8rdm7" podStartSLOduration=3.528139254 podStartE2EDuration="7.019243909s" podCreationTimestamp="2025-10-01 07:04:40 +0000 UTC" firstStartedPulling="2025-10-01 07:04:42.904236346 +0000 UTC m=+5731.842215000" lastFinishedPulling="2025-10-01 07:04:46.395341031 +0000 UTC m=+5735.333319655" observedRunningTime="2025-10-01 07:04:47.013620115 +0000 UTC m=+5735.951598749" watchObservedRunningTime="2025-10-01 07:04:47.019243909 +0000 UTC m=+5735.957222543" Oct 01 07:04:47 crc kubenswrapper[4661]: I1001 07:04:47.995171 4661 generic.go:334] "Generic (PLEG): container finished" podID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" containerID="0d5204703699537944e4365a5d393794783bdae86fcc798530908f76cf9e56af" exitCode=0 Oct 01 07:04:47 crc kubenswrapper[4661]: I1001 07:04:47.995331 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ff1ca911-a470-4bfb-8cc2-3f76257eed1f","Type":"ContainerDied","Data":"0d5204703699537944e4365a5d393794783bdae86fcc798530908f76cf9e56af"} Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.405405 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555362 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgw65\" (UniqueName: \"kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555468 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555527 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555555 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555721 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555768 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555843 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555892 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.555922 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs\") pod \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\" (UID: \"ff1ca911-a470-4bfb-8cc2-3f76257eed1f\") " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.556259 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.556592 4661 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.556773 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data" (OuterVolumeSpecName: "config-data") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.562278 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65" (OuterVolumeSpecName: "kube-api-access-pgw65") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "kube-api-access-pgw65". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.563762 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.576834 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "test-operator-logs") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.585768 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.600173 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.611154 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.650749 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ff1ca911-a470-4bfb-8cc2-3f76257eed1f" (UID: "ff1ca911-a470-4bfb-8cc2-3f76257eed1f"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660042 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660123 4661 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660144 4661 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660163 4661 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660182 4661 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660201 4661 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660273 4661 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.660293 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgw65\" (UniqueName: \"kubernetes.io/projected/ff1ca911-a470-4bfb-8cc2-3f76257eed1f-kube-api-access-pgw65\") on node \"crc\" DevicePath \"\"" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.705270 4661 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 01 07:04:49 crc kubenswrapper[4661]: I1001 07:04:49.765178 4661 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 01 
07:04:50 crc kubenswrapper[4661]: I1001 07:04:50.024111 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ff1ca911-a470-4bfb-8cc2-3f76257eed1f","Type":"ContainerDied","Data":"6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b"} Oct 01 07:04:50 crc kubenswrapper[4661]: I1001 07:04:50.024161 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f56d09f533ef75a4725d6def602650669888641b9d903a73ca51e4d1f9dd78b" Oct 01 07:04:50 crc kubenswrapper[4661]: I1001 07:04:50.024233 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 07:04:50 crc kubenswrapper[4661]: I1001 07:04:50.589406 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:50 crc kubenswrapper[4661]: I1001 07:04:50.589543 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:04:51 crc kubenswrapper[4661]: I1001 07:04:51.689064 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8rdm7" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="registry-server" probeResult="failure" output=< Oct 01 07:04:51 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s Oct 01 07:04:51 crc kubenswrapper[4661]: > Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.233538 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 07:04:52 crc kubenswrapper[4661]: E1001 07:04:52.234327 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" containerName="tempest-tests-tempest-tests-runner" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.234346 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" containerName="tempest-tests-tempest-tests-runner" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.234567 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1ca911-a470-4bfb-8cc2-3f76257eed1f" containerName="tempest-tests-tempest-tests-runner" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.235277 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.238223 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vzff5" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.250536 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.419199 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjwb8\" (UniqueName: \"kubernetes.io/projected/1c1b7300-2c13-4f7a-92d8-be7ff9e0374c-kube-api-access-kjwb8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.419707 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.521604 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.521881 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjwb8\" (UniqueName: \"kubernetes.io/projected/1c1b7300-2c13-4f7a-92d8-be7ff9e0374c-kube-api-access-kjwb8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.522117 4661 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.554281 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjwb8\" (UniqueName: \"kubernetes.io/projected/1c1b7300-2c13-4f7a-92d8-be7ff9e0374c-kube-api-access-kjwb8\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc kubenswrapper[4661]: I1001 07:04:52.580750 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:52 crc 
kubenswrapper[4661]: I1001 07:04:52.866578 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 07:04:53 crc kubenswrapper[4661]: I1001 07:04:53.181459 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 07:04:54 crc kubenswrapper[4661]: I1001 07:04:54.067445 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c","Type":"ContainerStarted","Data":"76ed276d961f8d4eadbef2dae2dfc2820884bd17ced074883fc6ce7e934f544b"} Oct 01 07:04:54 crc kubenswrapper[4661]: I1001 07:04:54.757366 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:04:54 crc kubenswrapper[4661]: E1001 07:04:54.757972 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:04:56 crc kubenswrapper[4661]: I1001 07:04:56.094057 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"1c1b7300-2c13-4f7a-92d8-be7ff9e0374c","Type":"ContainerStarted","Data":"c8c7cbfcf0dd2e150095b64d54c7b13743fec9d6e54f8fd23df4782477b76917"} Oct 01 07:04:56 crc kubenswrapper[4661]: I1001 07:04:56.124121 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.306257948 podStartE2EDuration="4.124096607s" podCreationTimestamp="2025-10-01 07:04:52 +0000 UTC" firstStartedPulling="2025-10-01 07:04:53.188273469 +0000 UTC m=+5742.126252083" lastFinishedPulling="2025-10-01 07:04:55.006112078 +0000 UTC m=+5743.944090742" observedRunningTime="2025-10-01 07:04:56.115149552 +0000 UTC m=+5745.053128206" watchObservedRunningTime="2025-10-01 07:04:56.124096607 +0000 UTC m=+5745.062075221" Oct 01 07:05:00 crc kubenswrapper[4661]: I1001 07:05:00.671544 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:05:00 crc kubenswrapper[4661]: I1001 07:05:00.752577 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:05:00 crc kubenswrapper[4661]: I1001 07:05:00.916567 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8rdm7"] Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.166009 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8rdm7" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="registry-server" containerID="cri-o://4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b" gracePeriod=2 Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.764271 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.878902 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities\") pod \"6347dd3e-2545-4894-9146-45336d89695a\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.879132 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnhh7\" (UniqueName: \"kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7\") pod \"6347dd3e-2545-4894-9146-45336d89695a\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.879218 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content\") pod \"6347dd3e-2545-4894-9146-45336d89695a\" (UID: \"6347dd3e-2545-4894-9146-45336d89695a\") " Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.880189 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities" (OuterVolumeSpecName: "utilities") pod "6347dd3e-2545-4894-9146-45336d89695a" (UID: "6347dd3e-2545-4894-9146-45336d89695a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.884792 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7" (OuterVolumeSpecName: "kube-api-access-wnhh7") pod "6347dd3e-2545-4894-9146-45336d89695a" (UID: "6347dd3e-2545-4894-9146-45336d89695a"). InnerVolumeSpecName "kube-api-access-wnhh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.982928 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.982978 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnhh7\" (UniqueName: \"kubernetes.io/projected/6347dd3e-2545-4894-9146-45336d89695a-kube-api-access-wnhh7\") on node \"crc\" DevicePath \"\"" Oct 01 07:05:02 crc kubenswrapper[4661]: I1001 07:05:02.997282 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6347dd3e-2545-4894-9146-45336d89695a" (UID: "6347dd3e-2545-4894-9146-45336d89695a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.085693 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347dd3e-2545-4894-9146-45336d89695a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.184767 4661 generic.go:334] "Generic (PLEG): container finished" podID="6347dd3e-2545-4894-9146-45336d89695a" containerID="4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b" exitCode=0 Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.184834 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerDied","Data":"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b"} Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.184845 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8rdm7" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.184874 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rdm7" event={"ID":"6347dd3e-2545-4894-9146-45336d89695a","Type":"ContainerDied","Data":"c82ae278ac88946f1802adfc9e9bc076b762a37acba65f0c9c2d2968c5cfe252"} Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.184902 4661 scope.go:117] "RemoveContainer" containerID="4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.225054 4661 scope.go:117] "RemoveContainer" containerID="bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.253040 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8rdm7"] Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.270399 4661 scope.go:117] "RemoveContainer" containerID="ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.272880 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8rdm7"] Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.328821 4661 scope.go:117] "RemoveContainer" containerID="4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b" Oct 01 07:05:03 crc kubenswrapper[4661]: E1001 07:05:03.329390 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b\": container with ID starting with 4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b not found: ID does not exist" containerID="4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.329436 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b"} err="failed to get container status \"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b\": rpc error: code = NotFound desc = could not find container \"4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b\": container with ID starting with 4e845cafceb86940725a15423aa8ba689d238a4b0f9a5b069c58d09654db886b not found: ID does not exist" Oct 01 07:05:03 crc 
kubenswrapper[4661]: I1001 07:05:03.329468 4661 scope.go:117] "RemoveContainer" containerID="bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6" Oct 01 07:05:03 crc kubenswrapper[4661]: E1001 07:05:03.329965 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6\": container with ID starting with bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6 not found: ID does not exist" containerID="bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.329996 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6"} err="failed to get container status \"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6\": rpc error: code = NotFound desc = could not find container \"bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6\": container with ID starting with bb1d01ff1e903e0f3e89af8bc4e5afa0dd758ef6ba3c41ec3dbe1f18ebc366c6 not found: ID does not exist" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.330017 4661 scope.go:117] "RemoveContainer" containerID="ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0" Oct 01 07:05:03 crc kubenswrapper[4661]: E1001 07:05:03.330424 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0\": container with ID starting with ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0 not found: ID does not exist" containerID="ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.330494 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0"} err="failed to get container status \"ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0\": rpc error: code = NotFound desc = could not find container \"ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0\": container with ID starting with ef3f8546b945e08868dcdcb649439482c768903e2ae6ed691f42d43e3a875eb0 not found: ID does not exist" Oct 01 07:05:03 crc kubenswrapper[4661]: I1001 07:05:03.778763 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6347dd3e-2545-4894-9146-45336d89695a" path="/var/lib/kubelet/pods/6347dd3e-2545-4894-9146-45336d89695a/volumes" Oct 01 07:05:05 crc kubenswrapper[4661]: I1001 07:05:05.756934 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:05:05 crc kubenswrapper[4661]: E1001 07:05:05.757479 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.721911 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fsd44/must-gather-rtdmc"] Oct 01 
07:05:12 crc kubenswrapper[4661]: E1001 07:05:12.722843 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="registry-server" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.722856 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="registry-server" Oct 01 07:05:12 crc kubenswrapper[4661]: E1001 07:05:12.722889 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="extract-content" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.722896 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="extract-content" Oct 01 07:05:12 crc kubenswrapper[4661]: E1001 07:05:12.722915 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="extract-utilities" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.722922 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="extract-utilities" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.723116 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="6347dd3e-2545-4894-9146-45336d89695a" containerName="registry-server" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.724186 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.726368 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-fsd44"/"default-dockercfg-kgt45" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.726552 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fsd44"/"openshift-service-ca.crt" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.726719 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fsd44"/"kube-root-ca.crt" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.729427 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fsd44/must-gather-rtdmc"] Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.866072 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8qsl\" (UniqueName: \"kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.866162 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.968544 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8qsl\" (UniqueName: \"kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 
07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.968621 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.969161 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:12 crc kubenswrapper[4661]: I1001 07:05:12.991512 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8qsl\" (UniqueName: \"kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl\") pod \"must-gather-rtdmc\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:13 crc kubenswrapper[4661]: I1001 07:05:13.079146 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:05:13 crc kubenswrapper[4661]: W1001 07:05:13.573706 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba9b5dfd_f400_4915_bfa0_8d7c6d2e816d.slice/crio-11f2adc85182001700d4565ffcb8da56a3d3405862f98ced61ce28049386b3d6 WatchSource:0}: Error finding container 11f2adc85182001700d4565ffcb8da56a3d3405862f98ced61ce28049386b3d6: Status 404 returned error can't find the container with id 11f2adc85182001700d4565ffcb8da56a3d3405862f98ced61ce28049386b3d6 Oct 01 07:05:13 crc kubenswrapper[4661]: I1001 07:05:13.588783 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fsd44/must-gather-rtdmc"] Oct 01 07:05:14 crc kubenswrapper[4661]: I1001 07:05:14.318557 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/must-gather-rtdmc" event={"ID":"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d","Type":"ContainerStarted","Data":"11f2adc85182001700d4565ffcb8da56a3d3405862f98ced61ce28049386b3d6"} Oct 01 07:05:20 crc kubenswrapper[4661]: I1001 07:05:20.383884 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/must-gather-rtdmc" event={"ID":"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d","Type":"ContainerStarted","Data":"4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061"} Oct 01 07:05:20 crc kubenswrapper[4661]: I1001 07:05:20.384352 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/must-gather-rtdmc" event={"ID":"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d","Type":"ContainerStarted","Data":"a7002fbc9b3dff924b446636e09186459c843c775bad698827b77c2bc4f8aeb6"} Oct 01 07:05:20 crc kubenswrapper[4661]: I1001 07:05:20.415702 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-fsd44/must-gather-rtdmc" podStartSLOduration=2.519220314 podStartE2EDuration="8.41567706s" podCreationTimestamp="2025-10-01 07:05:12 +0000 UTC" firstStartedPulling="2025-10-01 07:05:13.576005941 +0000 UTC m=+5762.513984555" lastFinishedPulling="2025-10-01 07:05:19.472462687 +0000 UTC m=+5768.410441301" observedRunningTime="2025-10-01 07:05:20.409170812 +0000 
UTC m=+5769.347149436" watchObservedRunningTime="2025-10-01 07:05:20.41567706 +0000 UTC m=+5769.353655714" Oct 01 07:05:20 crc kubenswrapper[4661]: I1001 07:05:20.758007 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:05:20 crc kubenswrapper[4661]: E1001 07:05:20.758540 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:05:25 crc kubenswrapper[4661]: I1001 07:05:25.048389 4661 scope.go:117] "RemoveContainer" containerID="d532e43626aa84f76de7a1241167f8682e4995d75051f4c832c0c597281f45e9" Oct 01 07:05:25 crc kubenswrapper[4661]: I1001 07:05:25.072345 4661 scope.go:117] "RemoveContainer" containerID="38e6d050bf8a5784077773026d636c525d02f4190300745da2be6912d50a4e6e" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.010629 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fsd44/crc-debug-vpp6j"] Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.014109 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.079172 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.079528 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72pgr\" (UniqueName: \"kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.182042 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.182165 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72pgr\" (UniqueName: \"kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.182194 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.201874 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-72pgr\" (UniqueName: \"kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr\") pod \"crc-debug-vpp6j\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.338111 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:05:26 crc kubenswrapper[4661]: I1001 07:05:26.467063 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" event={"ID":"aaec30fc-fca8-4a21-9692-3be10f4c9782","Type":"ContainerStarted","Data":"5841863dab881a3f7be0f572ae51336ca77197df7e4c493dbfd19d6d2866f054"} Oct 01 07:05:35 crc kubenswrapper[4661]: I1001 07:05:35.757916 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:05:35 crc kubenswrapper[4661]: E1001 07:05:35.758661 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:05:37 crc kubenswrapper[4661]: I1001 07:05:37.594996 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" event={"ID":"aaec30fc-fca8-4a21-9692-3be10f4c9782","Type":"ContainerStarted","Data":"cee28461a550f7d6c958d5ba1d0b565d5af2dac5204e45cf0d1ce92b0788ff15"} Oct 01 07:05:37 crc kubenswrapper[4661]: I1001 07:05:37.613380 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" podStartSLOduration=2.313510111 podStartE2EDuration="12.61335915s" podCreationTimestamp="2025-10-01 07:05:25 +0000 UTC" firstStartedPulling="2025-10-01 07:05:26.385083973 +0000 UTC m=+5775.323062587" lastFinishedPulling="2025-10-01 07:05:36.684933012 +0000 UTC m=+5785.622911626" observedRunningTime="2025-10-01 07:05:37.606673467 +0000 UTC m=+5786.544652081" watchObservedRunningTime="2025-10-01 07:05:37.61335915 +0000 UTC m=+5786.551337764" Oct 01 07:05:49 crc kubenswrapper[4661]: I1001 07:05:49.757592 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:05:49 crc kubenswrapper[4661]: E1001 07:05:49.758867 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:03 crc kubenswrapper[4661]: I1001 07:06:03.757250 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:06:03 crc kubenswrapper[4661]: E1001 07:06:03.758169 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:17 crc kubenswrapper[4661]: I1001 07:06:17.756751 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:06:17 crc kubenswrapper[4661]: E1001 07:06:17.757493 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:25 crc kubenswrapper[4661]: I1001 07:06:25.159626 4661 scope.go:117] "RemoveContainer" containerID="3aa3d8cfbc9b231a342a772ff926b3faa4d3b5b38122fa3aae86515de32a886d" Oct 01 07:06:30 crc kubenswrapper[4661]: I1001 07:06:30.757495 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:06:30 crc kubenswrapper[4661]: E1001 07:06:30.758440 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:43 crc kubenswrapper[4661]: I1001 07:06:43.757320 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:06:43 crc kubenswrapper[4661]: E1001 07:06:43.758016 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.259455 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54f94df7b6-mhnj2_70d1ade0-7d6b-4c94-a376-ef7027a47a76/barbican-api-log/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.286749 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54f94df7b6-mhnj2_70d1ade0-7d6b-4c94-a376-ef7027a47a76/barbican-api/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.501800 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9964d7f68-mbqp4_e5417b7f-b126-4335-ac74-b5d8f5713aee/barbican-keystone-listener/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.517179 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9964d7f68-mbqp4_e5417b7f-b126-4335-ac74-b5d8f5713aee/barbican-keystone-listener-log/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.705192 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-6f9cbcb89-ndbgc_7097203a-fb10-4615-9115-97d10c5b114d/barbican-worker/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.723063 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6f9cbcb89-ndbgc_7097203a-fb10-4615-9115-97d10c5b114d/barbican-worker-log/0.log" Oct 01 07:06:45 crc kubenswrapper[4661]: I1001 07:06:45.952221 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6_bbd00e4d-aa89-4800-867e-d8f78c3d2c70/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.218792 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/ceilometer-notification-agent/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.245609 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/ceilometer-central-agent/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.264966 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/proxy-httpd/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.418333 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/sg-core/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.663711 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5/cinder-api-log/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.692938 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5/cinder-api/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.893806 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_29f0e9e2-bbd8-4459-bc99-db5d742a37b8/cinder-scheduler/0.log" Oct 01 07:06:46 crc kubenswrapper[4661]: I1001 07:06:46.940552 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_29f0e9e2-bbd8-4459-bc99-db5d742a37b8/probe/0.log" Oct 01 07:06:47 crc kubenswrapper[4661]: I1001 07:06:47.079399 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-9248r_c6c35cbf-45e4-4538-b2de-4dbabd413307/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:47 crc kubenswrapper[4661]: I1001 07:06:47.173167 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5_d5db5a1f-67ff-49de-af0f-2ddc5b6dc078/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:47 crc kubenswrapper[4661]: I1001 07:06:47.375964 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t_cbe5a564-344a-449a-a457-61e5002621a6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:47 crc kubenswrapper[4661]: I1001 07:06:47.519727 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/init/0.log" Oct 01 07:06:47 crc kubenswrapper[4661]: I1001 07:06:47.897682 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/init/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.048835 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk_a68882a5-f8f7-40a2-8406-409452df5dc5/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.100177 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/dnsmasq-dns/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.250394 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e0802b8c-f0c0-4210-9618-ed452e52b5a0/glance-log/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.257418 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e0802b8c-f0c0-4210-9618-ed452e52b5a0/glance-httpd/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.380242 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6aa66ead-9f31-4644-a6ba-b3f6ddb82c64/glance-httpd/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.470757 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6aa66ead-9f31-4644-a6ba-b3f6ddb82c64/glance-log/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.693052 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d96d88fb-5fr24_a78c5827-b563-4f29-9a60-6810f67f943a/horizon/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.729973 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-nvtst_3b1a4183-d18a-4f41-b62a-12f52370c46e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:48 crc kubenswrapper[4661]: I1001 07:06:48.979801 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-p6p8t_a9c45dbf-cdfc-49a6-ac9d-49609a690564/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.189334 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d96d88fb-5fr24_a78c5827-b563-4f29-9a60-6810f67f943a/horizon-log/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.278428 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321641-pqtk6_bbe73ddd-f327-4f4b-ba2d-d647bae84361/keystone-cron/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.495011 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321701-zsg45_850d09d7-51e1-4b5f-a53e-5d39cc38dac2/keystone-cron/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.598359 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7677654df9-tdbxq_a2287a17-7b4e-40d0-ba56-0e78abd1b1ec/keystone-api/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.627203 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ea68e214-b71a-4f5b-b2ee-9091d484023d/kube-state-metrics/0.log" Oct 01 07:06:49 crc kubenswrapper[4661]: I1001 07:06:49.794616 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq_322bbaf3-0120-49be-90f1-04d42199e753/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:50 crc kubenswrapper[4661]: I1001 07:06:50.273854 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-c77567f6f-m5g68_35deb613-6735-4de6-ab11-50138ce73e30/neutron-httpd/0.log" Oct 01 07:06:50 crc kubenswrapper[4661]: I1001 07:06:50.384650 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-c77567f6f-m5g68_35deb613-6735-4de6-ab11-50138ce73e30/neutron-api/0.log" Oct 01 07:06:50 crc kubenswrapper[4661]: I1001 07:06:50.499930 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft_632190ce-99ee-453c-8cdb-103d2f512c6b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:51 crc kubenswrapper[4661]: I1001 07:06:51.391822 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_2652d904-fc8d-4fe1-8980-896266d64eec/nova-cell0-conductor-conductor/0.log" Oct 01 07:06:51 crc kubenswrapper[4661]: I1001 07:06:51.967366 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_12ce0bf4-4fb7-44da-87d4-9592ef8848a1/nova-cell1-conductor-conductor/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.395615 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636/nova-api-log/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.548743 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_81e96b52-f038-466f-92cd-07f4f8574bd5/nova-cell1-novncproxy-novncproxy/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.563677 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636/nova-api-api/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.788703 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_2741b07c-1750-4920-a734-2f51af08ac8b/memcached/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.860272 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-2xtbm_2e4df47f-3fbf-4a44-89d7-fd97b1fef95f/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:52 crc kubenswrapper[4661]: I1001 07:06:52.934384 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_299202ce-4dac-4387-8684-b94ca8f9f1b3/nova-metadata-log/0.log" Oct 01 07:06:53 crc kubenswrapper[4661]: I1001 07:06:53.345211 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c1001022-d4fa-47f2-804f-480807988029/nova-scheduler-scheduler/0.log" Oct 01 07:06:53 crc kubenswrapper[4661]: I1001 07:06:53.428316 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/mysql-bootstrap/0.log" Oct 01 07:06:53 crc kubenswrapper[4661]: I1001 07:06:53.668468 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/mysql-bootstrap/0.log" Oct 01 07:06:53 crc kubenswrapper[4661]: I1001 07:06:53.746039 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/galera/0.log" Oct 01 07:06:53 crc kubenswrapper[4661]: I1001 07:06:53.924517 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/mysql-bootstrap/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.067945 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/mysql-bootstrap/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.118109 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/galera/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.280777 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c10d9095-6d32-4b4a-8706-d06e0693ddb9/openstackclient/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.453967 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hlpwz_c93ef988-b9e9-4cfc-950b-2b3060b6e4b8/openstack-network-exporter/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.520995 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_299202ce-4dac-4387-8684-b94ca8f9f1b3/nova-metadata-metadata/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.599150 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-mplg4_c5001332-068e-46eb-a21c-25e29832baab/ovn-controller/0.log" Oct 01 07:06:54 crc kubenswrapper[4661]: I1001 07:06:54.874375 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server-init/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.016402 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server-init/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.076140 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.207756 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovs-vswitchd/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.259157 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-v8wft_5dc6881d-aedd-4945-98d8-9993fedd71dd/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.391577 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8586024c-fe6a-4ccd-adc0-2e8e2a1bf823/openstack-network-exporter/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.398359 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8586024c-fe6a-4ccd-adc0-2e8e2a1bf823/ovn-northd/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.535521 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dff60953-8a38-41cb-bc21-6192798508a1/openstack-network-exporter/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.611370 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_dff60953-8a38-41cb-bc21-6192798508a1/ovsdbserver-nb/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.681104 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6152c766-cf88-4b8c-9c8a-372dcdd4e62b/openstack-network-exporter/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.747725 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6152c766-cf88-4b8c-9c8a-372dcdd4e62b/ovsdbserver-sb/0.log" Oct 01 07:06:55 crc kubenswrapper[4661]: I1001 07:06:55.954173 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-558754b4b6-4khhg_59481d9b-6c9f-48ac-93d1-870dbfb6edaf/placement-api/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.043849 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-558754b4b6-4khhg_59481d9b-6c9f-48ac-93d1-870dbfb6edaf/placement-log/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.074100 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/init-config-reloader/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.226322 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/init-config-reloader/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.226378 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/prometheus/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.230302 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/config-reloader/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.233053 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/thanos-sidecar/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.389892 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/setup-container/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.597581 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/setup-container/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.617661 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/rabbitmq/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.633537 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/setup-container/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.778782 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/setup-container/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.793306 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/rabbitmq/0.log" Oct 01 07:06:56 crc kubenswrapper[4661]: I1001 07:06:56.846776 4661 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/setup-container/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.023235 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/rabbitmq/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.026421 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/setup-container/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.060405 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv_c9202962-2893-458d-996c-2890fa302029/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.198059 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-sjh66_dc423888-0bfc-45b9-ba9f-6bc52e8df43b/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.248567 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k_a0c02866-25ee-4ef1-9bba-572422cabc26/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.408564 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fwf7d_daed5adf-0ad7-4236-abc5-fcd5053645b7/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.466038 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-fzw95_7d367bb3-134b-460e-8cff-75aad6a88043/ssh-known-hosts-edpm-deployment/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.734012 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57dbcb9cbf-64x2k_cb384d97-a4b8-4eba-ac70-0ba6843cec4e/proxy-server/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.760183 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57dbcb9cbf-64x2k_cb384d97-a4b8-4eba-ac70-0ba6843cec4e/proxy-httpd/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.839230 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-t28p6_66f7b863-7d30-41b4-882c-c982fafa148a/swift-ring-rebalance/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.914255 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-auditor/0.log" Oct 01 07:06:57 crc kubenswrapper[4661]: I1001 07:06:57.918789 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-reaper/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.037577 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-replicator/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.103128 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-auditor/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.110908 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-server/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.175735 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-replicator/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.227219 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-server/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.309726 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-updater/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.315806 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-auditor/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.360518 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-expirer/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.430524 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-replicator/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.513673 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-server/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.514482 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-updater/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.544329 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/rsync/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.598450 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/swift-recon-cron/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.718267 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs_e2078d83-8d53-4052-8b77-031948bc8705/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.757177 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:06:58 crc kubenswrapper[4661]: E1001 07:06:58.757434 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.822663 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_ff1ca911-a470-4bfb-8cc2-3f76257eed1f/tempest-tests-tempest-tests-runner/0.log" Oct 01 07:06:58 crc kubenswrapper[4661]: I1001 07:06:58.871826 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_1c1b7300-2c13-4f7a-92d8-be7ff9e0374c/test-operator-logs-container/0.log" Oct 01 07:06:59 crc kubenswrapper[4661]: I1001 07:06:59.037826 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq_5ec720ac-37b8-4dab-9b08-717b48bfae27/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:06:59 crc kubenswrapper[4661]: I1001 07:06:59.794937 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_b6ede629-3e6b-448b-be47-77ce371d40f7/watcher-applier/0.log" Oct 01 07:07:00 crc kubenswrapper[4661]: I1001 07:07:00.179246 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_0858ffab-0d1d-422b-8ac2-abeef9ab22ed/watcher-api-log/0.log" Oct 01 07:07:02 crc kubenswrapper[4661]: I1001 07:07:02.480887 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a/watcher-decision-engine/0.log" Oct 01 07:07:03 crc kubenswrapper[4661]: I1001 07:07:03.401988 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_0858ffab-0d1d-422b-8ac2-abeef9ab22ed/watcher-api/0.log" Oct 01 07:07:09 crc kubenswrapper[4661]: I1001 07:07:09.757774 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:07:09 crc kubenswrapper[4661]: E1001 07:07:09.758421 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:07:24 crc kubenswrapper[4661]: I1001 07:07:24.757310 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:07:24 crc kubenswrapper[4661]: E1001 07:07:24.758059 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.277489 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.280689 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.303722 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.398733 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.399018 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.399154 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8ndl\" (UniqueName: \"kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.501022 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.501106 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8ndl\" (UniqueName: \"kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.501160 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.501720 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.501722 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.523849 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-p8ndl\" (UniqueName: \"kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl\") pod \"community-operators-cs7z9\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:37 crc kubenswrapper[4661]: I1001 07:07:37.665061 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:38 crc kubenswrapper[4661]: I1001 07:07:38.305313 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:38 crc kubenswrapper[4661]: I1001 07:07:38.757869 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:07:38 crc kubenswrapper[4661]: E1001 07:07:38.758066 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:07:38 crc kubenswrapper[4661]: I1001 07:07:38.816551 4661 generic.go:334] "Generic (PLEG): container finished" podID="eb9b3386-7aad-4101-8a63-46ece1327718" containerID="f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b" exitCode=0 Oct 01 07:07:38 crc kubenswrapper[4661]: I1001 07:07:38.816842 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerDied","Data":"f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b"} Oct 01 07:07:38 crc kubenswrapper[4661]: I1001 07:07:38.816867 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerStarted","Data":"0a929fdf449ff3cad6090549f15af80732e45de571d04425ce67ea58ecb66b01"} Oct 01 07:07:39 crc kubenswrapper[4661]: I1001 07:07:39.847916 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerStarted","Data":"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab"} Oct 01 07:07:41 crc kubenswrapper[4661]: I1001 07:07:41.871323 4661 generic.go:334] "Generic (PLEG): container finished" podID="eb9b3386-7aad-4101-8a63-46ece1327718" containerID="19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab" exitCode=0 Oct 01 07:07:41 crc kubenswrapper[4661]: I1001 07:07:41.871391 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerDied","Data":"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab"} Oct 01 07:07:41 crc kubenswrapper[4661]: I1001 07:07:41.876247 4661 generic.go:334] "Generic (PLEG): container finished" podID="aaec30fc-fca8-4a21-9692-3be10f4c9782" containerID="cee28461a550f7d6c958d5ba1d0b565d5af2dac5204e45cf0d1ce92b0788ff15" exitCode=0 Oct 01 07:07:41 crc kubenswrapper[4661]: I1001 07:07:41.876299 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-fsd44/crc-debug-vpp6j" event={"ID":"aaec30fc-fca8-4a21-9692-3be10f4c9782","Type":"ContainerDied","Data":"cee28461a550f7d6c958d5ba1d0b565d5af2dac5204e45cf0d1ce92b0788ff15"} Oct 01 07:07:42 crc kubenswrapper[4661]: I1001 07:07:42.891510 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerStarted","Data":"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64"} Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.043123 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.061606 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cs7z9" podStartSLOduration=2.543878946 podStartE2EDuration="6.061588402s" podCreationTimestamp="2025-10-01 07:07:37 +0000 UTC" firstStartedPulling="2025-10-01 07:07:38.819285841 +0000 UTC m=+5907.757264485" lastFinishedPulling="2025-10-01 07:07:42.336995307 +0000 UTC m=+5911.274973941" observedRunningTime="2025-10-01 07:07:42.938141651 +0000 UTC m=+5911.876120275" watchObservedRunningTime="2025-10-01 07:07:43.061588402 +0000 UTC m=+5911.999567036" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.081274 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-vpp6j"] Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.091467 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-vpp6j"] Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.112428 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72pgr\" (UniqueName: \"kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr\") pod \"aaec30fc-fca8-4a21-9692-3be10f4c9782\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.112557 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host\") pod \"aaec30fc-fca8-4a21-9692-3be10f4c9782\" (UID: \"aaec30fc-fca8-4a21-9692-3be10f4c9782\") " Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.112829 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host" (OuterVolumeSpecName: "host") pod "aaec30fc-fca8-4a21-9692-3be10f4c9782" (UID: "aaec30fc-fca8-4a21-9692-3be10f4c9782"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.113158 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aaec30fc-fca8-4a21-9692-3be10f4c9782-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.117934 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr" (OuterVolumeSpecName: "kube-api-access-72pgr") pod "aaec30fc-fca8-4a21-9692-3be10f4c9782" (UID: "aaec30fc-fca8-4a21-9692-3be10f4c9782"). InnerVolumeSpecName "kube-api-access-72pgr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.215438 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72pgr\" (UniqueName: \"kubernetes.io/projected/aaec30fc-fca8-4a21-9692-3be10f4c9782-kube-api-access-72pgr\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.772725 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaec30fc-fca8-4a21-9692-3be10f4c9782" path="/var/lib/kubelet/pods/aaec30fc-fca8-4a21-9692-3be10f4c9782/volumes" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.900462 4661 scope.go:117] "RemoveContainer" containerID="cee28461a550f7d6c958d5ba1d0b565d5af2dac5204e45cf0d1ce92b0788ff15" Oct 01 07:07:43 crc kubenswrapper[4661]: I1001 07:07:43.900534 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-vpp6j" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.334192 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fsd44/crc-debug-t7hp4"] Oct 01 07:07:44 crc kubenswrapper[4661]: E1001 07:07:44.335028 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaec30fc-fca8-4a21-9692-3be10f4c9782" containerName="container-00" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.335045 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaec30fc-fca8-4a21-9692-3be10f4c9782" containerName="container-00" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.335338 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaec30fc-fca8-4a21-9692-3be10f4c9782" containerName="container-00" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.336152 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.443444 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bqxx\" (UniqueName: \"kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.443945 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.546124 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.546260 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bqxx\" (UniqueName: \"kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.546337 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.574286 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bqxx\" (UniqueName: \"kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx\") pod \"crc-debug-t7hp4\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.668066 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:44 crc kubenswrapper[4661]: W1001 07:07:44.705119 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod593571ec_2166_434f_a79a_a580bb7a88a9.slice/crio-8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc WatchSource:0}: Error finding container 8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc: Status 404 returned error can't find the container with id 8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc Oct 01 07:07:44 crc kubenswrapper[4661]: I1001 07:07:44.913196 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" event={"ID":"593571ec-2166-434f-a79a-a580bb7a88a9","Type":"ContainerStarted","Data":"8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc"} Oct 01 07:07:45 crc kubenswrapper[4661]: E1001 07:07:45.362558 4661 log.go:32] "ReopenContainerLog from runtime service failed" err="rpc error: code = Unknown desc = container is not running" containerID="9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470" Oct 01 07:07:45 crc kubenswrapper[4661]: E1001 07:07:45.362661 4661 container_log_manager.go:307] "Failed to rotate log for container" err="failed to rotate log \"/var/log/pods/openshift-must-gather-fsd44_crc-debug-t7hp4_593571ec-2166-434f-a79a-a580bb7a88a9/container-00/0.log\": failed to reopen container log \"9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470\": rpc error: code = Unknown desc = container is not running" worker=1 containerID="9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470" path="/var/log/pods/openshift-must-gather-fsd44_crc-debug-t7hp4_593571ec-2166-434f-a79a-a580bb7a88a9/container-00/0.log" currentSize=75224082 maxSize=52428800 Oct 01 07:07:45 crc kubenswrapper[4661]: I1001 07:07:45.931903 4661 generic.go:334] "Generic (PLEG): container finished" podID="593571ec-2166-434f-a79a-a580bb7a88a9" containerID="9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470" exitCode=0 Oct 01 07:07:45 crc kubenswrapper[4661]: I1001 07:07:45.932130 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" event={"ID":"593571ec-2166-434f-a79a-a580bb7a88a9","Type":"ContainerDied","Data":"9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470"} Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.060145 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.194774 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host\") pod \"593571ec-2166-434f-a79a-a580bb7a88a9\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.194934 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bqxx\" (UniqueName: \"kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx\") pod \"593571ec-2166-434f-a79a-a580bb7a88a9\" (UID: \"593571ec-2166-434f-a79a-a580bb7a88a9\") " Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.200491 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx" (OuterVolumeSpecName: "kube-api-access-4bqxx") pod "593571ec-2166-434f-a79a-a580bb7a88a9" (UID: "593571ec-2166-434f-a79a-a580bb7a88a9"). InnerVolumeSpecName "kube-api-access-4bqxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.200533 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host" (OuterVolumeSpecName: "host") pod "593571ec-2166-434f-a79a-a580bb7a88a9" (UID: "593571ec-2166-434f-a79a-a580bb7a88a9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.296668 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bqxx\" (UniqueName: \"kubernetes.io/projected/593571ec-2166-434f-a79a-a580bb7a88a9-kube-api-access-4bqxx\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.296695 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/593571ec-2166-434f-a79a-a580bb7a88a9-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.665335 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.665377 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.714168 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.949432 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" event={"ID":"593571ec-2166-434f-a79a-a580bb7a88a9","Type":"ContainerDied","Data":"8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc"} Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.949466 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-t7hp4" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.949481 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8543a69aa3b42f4bd8e9e1e43b5b49e7de2495d1d39517abf3bf112d77101ccc" Oct 01 07:07:47 crc kubenswrapper[4661]: I1001 07:07:47.993698 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:48 crc kubenswrapper[4661]: I1001 07:07:48.039743 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:49 crc kubenswrapper[4661]: I1001 07:07:49.964697 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cs7z9" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="registry-server" containerID="cri-o://3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64" gracePeriod=2 Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.420432 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.564334 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content\") pod \"eb9b3386-7aad-4101-8a63-46ece1327718\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.564650 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8ndl\" (UniqueName: \"kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl\") pod \"eb9b3386-7aad-4101-8a63-46ece1327718\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.564698 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities\") pod \"eb9b3386-7aad-4101-8a63-46ece1327718\" (UID: \"eb9b3386-7aad-4101-8a63-46ece1327718\") " Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.565523 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities" (OuterVolumeSpecName: "utilities") pod "eb9b3386-7aad-4101-8a63-46ece1327718" (UID: "eb9b3386-7aad-4101-8a63-46ece1327718"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.576440 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl" (OuterVolumeSpecName: "kube-api-access-p8ndl") pod "eb9b3386-7aad-4101-8a63-46ece1327718" (UID: "eb9b3386-7aad-4101-8a63-46ece1327718"). InnerVolumeSpecName "kube-api-access-p8ndl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.607793 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb9b3386-7aad-4101-8a63-46ece1327718" (UID: "eb9b3386-7aad-4101-8a63-46ece1327718"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.666254 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8ndl\" (UniqueName: \"kubernetes.io/projected/eb9b3386-7aad-4101-8a63-46ece1327718-kube-api-access-p8ndl\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.666462 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.666475 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb9b3386-7aad-4101-8a63-46ece1327718-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.756673 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:07:50 crc kubenswrapper[4661]: E1001 07:07:50.757212 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.975886 4661 generic.go:334] "Generic (PLEG): container finished" podID="eb9b3386-7aad-4101-8a63-46ece1327718" containerID="3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64" exitCode=0 Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.975937 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cs7z9" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.975943 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerDied","Data":"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64"} Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.975991 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cs7z9" event={"ID":"eb9b3386-7aad-4101-8a63-46ece1327718","Type":"ContainerDied","Data":"0a929fdf449ff3cad6090549f15af80732e45de571d04425ce67ea58ecb66b01"} Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.976019 4661 scope.go:117] "RemoveContainer" containerID="3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64" Oct 01 07:07:50 crc kubenswrapper[4661]: I1001 07:07:50.994473 4661 scope.go:117] "RemoveContainer" containerID="19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.025702 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.028909 4661 scope.go:117] "RemoveContainer" containerID="f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.035279 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cs7z9"] Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.064015 4661 scope.go:117] "RemoveContainer" containerID="3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64" Oct 01 07:07:51 crc kubenswrapper[4661]: E1001 07:07:51.064430 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64\": container with ID starting with 3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64 not found: ID does not exist" containerID="3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.064473 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64"} err="failed to get container status \"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64\": rpc error: code = NotFound desc = could not find container \"3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64\": container with ID starting with 3eba1d6f70214f4f8e7ee7c15dadae0cf365b735699e88d817d6c030f2b26c64 not found: ID does not exist" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.064496 4661 scope.go:117] "RemoveContainer" containerID="19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab" Oct 01 07:07:51 crc kubenswrapper[4661]: E1001 07:07:51.064953 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab\": container with ID starting with 19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab not found: ID does not exist" containerID="19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.065001 4661 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab"} err="failed to get container status \"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab\": rpc error: code = NotFound desc = could not find container \"19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab\": container with ID starting with 19a8027d6eab857d24974757a2d5203e2540bb1db953bb75acd0e61ede7541ab not found: ID does not exist" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.065036 4661 scope.go:117] "RemoveContainer" containerID="f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b" Oct 01 07:07:51 crc kubenswrapper[4661]: E1001 07:07:51.065401 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b\": container with ID starting with f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b not found: ID does not exist" containerID="f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.065448 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b"} err="failed to get container status \"f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b\": rpc error: code = NotFound desc = could not find container \"f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b\": container with ID starting with f38b5decfa1df62f0c9b21512d941b02892f800a8d2b4ca8d7a84ebf6d9fff9b not found: ID does not exist" Oct 01 07:07:51 crc kubenswrapper[4661]: I1001 07:07:51.767698 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" path="/var/lib/kubelet/pods/eb9b3386-7aad-4101-8a63-46ece1327718/volumes" Oct 01 07:07:55 crc kubenswrapper[4661]: I1001 07:07:55.501662 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-t7hp4"] Oct 01 07:07:55 crc kubenswrapper[4661]: I1001 07:07:55.514703 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-t7hp4"] Oct 01 07:07:55 crc kubenswrapper[4661]: I1001 07:07:55.767915 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="593571ec-2166-434f-a79a-a580bb7a88a9" path="/var/lib/kubelet/pods/593571ec-2166-434f-a79a-a580bb7a88a9/volumes" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.680268 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fsd44/crc-debug-svlkn"] Oct 01 07:07:56 crc kubenswrapper[4661]: E1001 07:07:56.681056 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="registry-server" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681072 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="registry-server" Oct 01 07:07:56 crc kubenswrapper[4661]: E1001 07:07:56.681091 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="593571ec-2166-434f-a79a-a580bb7a88a9" containerName="container-00" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681100 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="593571ec-2166-434f-a79a-a580bb7a88a9" containerName="container-00" 
Oct 01 07:07:56 crc kubenswrapper[4661]: E1001 07:07:56.681121 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="extract-content" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681130 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="extract-content" Oct 01 07:07:56 crc kubenswrapper[4661]: E1001 07:07:56.681158 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="extract-utilities" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681166 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="extract-utilities" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681417 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="593571ec-2166-434f-a79a-a580bb7a88a9" containerName="container-00" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.681443 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb9b3386-7aad-4101-8a63-46ece1327718" containerName="registry-server" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.682182 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.775532 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.776018 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z75ph\" (UniqueName: \"kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.877797 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.878097 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z75ph\" (UniqueName: \"kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.879519 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:56 crc kubenswrapper[4661]: I1001 07:07:56.914443 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z75ph\" (UniqueName: 
\"kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph\") pod \"crc-debug-svlkn\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:57 crc kubenswrapper[4661]: I1001 07:07:57.014628 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:58 crc kubenswrapper[4661]: I1001 07:07:58.051495 4661 generic.go:334] "Generic (PLEG): container finished" podID="0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" containerID="8bd4bbc33e5e9945af94a9df8a9b192dcbf0ddf9641793852feb6ef51a428cc0" exitCode=0 Oct 01 07:07:58 crc kubenswrapper[4661]: I1001 07:07:58.051756 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-svlkn" event={"ID":"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67","Type":"ContainerDied","Data":"8bd4bbc33e5e9945af94a9df8a9b192dcbf0ddf9641793852feb6ef51a428cc0"} Oct 01 07:07:58 crc kubenswrapper[4661]: I1001 07:07:58.052148 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/crc-debug-svlkn" event={"ID":"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67","Type":"ContainerStarted","Data":"7d72db4935dab3f4f421b644a2200e7358e308ae2a9d7ec3367d1aba6161aa22"} Oct 01 07:07:58 crc kubenswrapper[4661]: I1001 07:07:58.111308 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-svlkn"] Oct 01 07:07:58 crc kubenswrapper[4661]: I1001 07:07:58.130133 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fsd44/crc-debug-svlkn"] Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.163915 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.225339 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host\") pod \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.225460 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host" (OuterVolumeSpecName: "host") pod "0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" (UID: "0bc37c45-23e4-4cb2-b87b-5e880d7a0d67"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.225475 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z75ph\" (UniqueName: \"kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph\") pod \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\" (UID: \"0bc37c45-23e4-4cb2-b87b-5e880d7a0d67\") " Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.226147 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.231744 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph" (OuterVolumeSpecName: "kube-api-access-z75ph") pod "0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" (UID: "0bc37c45-23e4-4cb2-b87b-5e880d7a0d67"). 
InnerVolumeSpecName "kube-api-access-z75ph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.328479 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z75ph\" (UniqueName: \"kubernetes.io/projected/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67-kube-api-access-z75ph\") on node \"crc\" DevicePath \"\"" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.766441 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" path="/var/lib/kubelet/pods/0bc37c45-23e4-4cb2-b87b-5e880d7a0d67/volumes" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.800613 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-p6lbb_1e3e3612-5d8a-4db4-af00-94428fcb570e/kube-rbac-proxy/0.log" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.813581 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-p6lbb_1e3e3612-5d8a-4db4-af00-94428fcb570e/manager/0.log" Oct 01 07:07:59 crc kubenswrapper[4661]: I1001 07:07:59.949957 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-qslns_a23b3c29-b18c-4ea0-8723-41000d6a754b/kube-rbac-proxy/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.021579 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-qslns_a23b3c29-b18c-4ea0-8723-41000d6a754b/manager/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.071534 4661 scope.go:117] "RemoveContainer" containerID="8bd4bbc33e5e9945af94a9df8a9b192dcbf0ddf9641793852feb6ef51a428cc0" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.071671 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/crc-debug-svlkn" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.131047 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-jszh9_dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2/kube-rbac-proxy/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.132970 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-jszh9_dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2/manager/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.198303 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.378542 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.394929 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.420983 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.586687 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.612670 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/extract/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.628595 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.749837 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-fmrmk_29b41883-13c4-454f-a3d3-45aa0db29f82/kube-rbac-proxy/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.835006 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8fsv8_12785d9c-9cdb-4c80-bc4b-ee398e655992/kube-rbac-proxy/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.838274 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-fmrmk_29b41883-13c4-454f-a3d3-45aa0db29f82/manager/0.log" Oct 01 07:08:00 crc kubenswrapper[4661]: I1001 07:08:00.964620 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8fsv8_12785d9c-9cdb-4c80-bc4b-ee398e655992/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.024152 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-7w27g_001f8e02-2d35-4c68-88ae-4d732588213c/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.075162 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-7w27g_001f8e02-2d35-4c68-88ae-4d732588213c/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.179157 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-9msgq_0e131827-21a2-4464-80d3-7528c1d8c52a/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.370895 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-7fjjd_50f20957-2408-4a65-a326-e3b76051b38b/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.395356 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-9msgq_0e131827-21a2-4464-80d3-7528c1d8c52a/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.396796 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-7fjjd_50f20957-2408-4a65-a326-e3b76051b38b/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.546134 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-6l9mf_c0601966-5144-438f-a862-3f397e7064a4/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.608249 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-6l9mf_c0601966-5144-438f-a862-3f397e7064a4/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.663848 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-dxp6r_f020bcbc-c80b-4465-9733-204a86325234/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.750207 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-dxp6r_f020bcbc-c80b-4465-9733-204a86325234/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.806267 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-nwsxg_9d7506fd-1133-4927-872d-c68c525cba62/kube-rbac-proxy/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.830752 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-nwsxg_9d7506fd-1133-4927-872d-c68c525cba62/manager/0.log" Oct 01 07:08:01 crc kubenswrapper[4661]: I1001 07:08:01.992898 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8kwhx_2e8d1048-762f-4737-82c2-c6244072bf9d/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.020793 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8kwhx_2e8d1048-762f-4737-82c2-c6244072bf9d/manager/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.124172 4661 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-68scz_34d73c0f-65b8-4718-8409-849ac307168f/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.208149 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-68scz_34d73c0f-65b8-4718-8409-849ac307168f/manager/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.224940 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-f248r_5fbac0a5-84db-4228-9c2c-93d8d551044d/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.295778 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-f248r_5fbac0a5-84db-4228-9c2c-93d8d551044d/manager/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.372119 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k_8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.383563 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k_8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb/manager/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.597729 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8648d97544-tlsf9_68f9b331-6beb-4cda-884c-326180cb52c8/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.662766 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8d79f897-dcwff_12436698-76a2-4877-8d43-8af3c769ec32/kube-rbac-proxy/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.950553 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cvddl_2dc07b8e-5f1a-46cc-a33d-ffc63239d05a/registry-server/0.log" Oct 01 07:08:02 crc kubenswrapper[4661]: I1001 07:08:02.985397 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8d79f897-dcwff_12436698-76a2-4877-8d43-8af3c769ec32/operator/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.126840 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-lhhfj_01c6b4d6-55f6-4837-af54-9eb764262d03/kube-rbac-proxy/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.148834 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-lhhfj_01c6b4d6-55f6-4837-af54-9eb764262d03/manager/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.210809 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-xs8zb_3ad43452-d673-42a7-8495-887b5e93cacb/kube-rbac-proxy/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.359256 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-5n77c_ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb/operator/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 
07:08:03.398136 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-xs8zb_3ad43452-d673-42a7-8495-887b5e93cacb/manager/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.527302 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-cv29g_a9e55ad4-d1a5-4830-96db-02c95384650c/kube-rbac-proxy/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.599700 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-cv29g_a9e55ad4-d1a5-4830-96db-02c95384650c/manager/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.667370 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-cd2rk_5826e57e-36e6-43e0-8141-7e6e3ae936a6/kube-rbac-proxy/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.860777 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-jsj4d_838837a9-4076-41ba-91e4-44055ce7c97a/kube-rbac-proxy/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.885899 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8648d97544-tlsf9_68f9b331-6beb-4cda-884c-326180cb52c8/manager/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.931542 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-jsj4d_838837a9-4076-41ba-91e4-44055ce7c97a/manager/0.log" Oct 01 07:08:03 crc kubenswrapper[4661]: I1001 07:08:03.982304 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-cd2rk_5826e57e-36e6-43e0-8141-7e6e3ae936a6/manager/0.log" Oct 01 07:08:04 crc kubenswrapper[4661]: I1001 07:08:04.102184 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-2hjs8_1789dd27-2b5e-46e6-9260-affd4daf86cb/kube-rbac-proxy/0.log" Oct 01 07:08:04 crc kubenswrapper[4661]: I1001 07:08:04.145697 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-2hjs8_1789dd27-2b5e-46e6-9260-affd4daf86cb/manager/0.log" Oct 01 07:08:04 crc kubenswrapper[4661]: I1001 07:08:04.757816 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:08:04 crc kubenswrapper[4661]: E1001 07:08:04.758118 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:08:15 crc kubenswrapper[4661]: I1001 07:08:15.758268 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:08:15 crc kubenswrapper[4661]: E1001 07:08:15.759546 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:08:20 crc kubenswrapper[4661]: I1001 07:08:20.878591 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kqlm6_61ab52df-4ef3-4f24-a8f3-01c7fef8c99f/control-plane-machine-set-operator/0.log" Oct 01 07:08:21 crc kubenswrapper[4661]: I1001 07:08:21.066421 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-sppsj_0c21d97e-1221-464d-ae54-56ea6e626e00/kube-rbac-proxy/0.log" Oct 01 07:08:21 crc kubenswrapper[4661]: I1001 07:08:21.102108 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-sppsj_0c21d97e-1221-464d-ae54-56ea6e626e00/machine-api-operator/0.log" Oct 01 07:08:29 crc kubenswrapper[4661]: I1001 07:08:29.757537 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:08:29 crc kubenswrapper[4661]: E1001 07:08:29.758663 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:08:34 crc kubenswrapper[4661]: I1001 07:08:34.589039 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-jpkgm_c676e4ec-6d7a-48a8-a54e-cb33046615f3/cert-manager-controller/0.log" Oct 01 07:08:34 crc kubenswrapper[4661]: I1001 07:08:34.696531 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-qx25f_c4de8059-947f-4d97-ad30-a2a3e1081b19/cert-manager-cainjector/0.log" Oct 01 07:08:34 crc kubenswrapper[4661]: I1001 07:08:34.759315 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4tqmh_8206bae1-8eaa-4f6b-9531-9c200316c97c/cert-manager-webhook/0.log" Oct 01 07:08:42 crc kubenswrapper[4661]: I1001 07:08:42.757583 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:08:43 crc kubenswrapper[4661]: I1001 07:08:43.543862 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e"} Oct 01 07:08:47 crc kubenswrapper[4661]: I1001 07:08:47.727609 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-xpx7n_db1944d4-6048-4e0c-86fc-3f37d4a653bc/nmstate-console-plugin/0.log" Oct 01 07:08:47 crc kubenswrapper[4661]: I1001 07:08:47.885949 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jp5md_49f9f7ff-ea7a-42c4-ad95-e8a05841ab36/nmstate-handler/0.log" Oct 01 07:08:47 crc kubenswrapper[4661]: I1001 07:08:47.954128 4661 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-9nbcr_06d34118-bb4a-4b6f-9637-2fdac6465088/kube-rbac-proxy/0.log" Oct 01 07:08:47 crc kubenswrapper[4661]: I1001 07:08:47.978790 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-9nbcr_06d34118-bb4a-4b6f-9637-2fdac6465088/nmstate-metrics/0.log" Oct 01 07:08:48 crc kubenswrapper[4661]: I1001 07:08:48.075109 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-6gctp_185fc7e0-5985-4401-9b50-60a661708075/nmstate-operator/0.log" Oct 01 07:08:48 crc kubenswrapper[4661]: I1001 07:08:48.166372 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-qkzcj_b4aa2a05-edba-4a48-a854-8c05535af455/nmstate-webhook/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.408205 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xfbgw_27288f70-f17e-4362-b115-c0c69e26aa91/kube-rbac-proxy/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.577567 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xfbgw_27288f70-f17e-4362-b115-c0c69e26aa91/controller/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.661480 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.778394 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.801899 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.860424 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log" Oct 01 07:09:02 crc kubenswrapper[4661]: I1001 07:09:02.862882 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.023756 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.036559 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.053835 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.063504 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.203583 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.224846 4661 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.228431 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.258273 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/controller/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.379170 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/kube-rbac-proxy/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.410619 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/frr-metrics/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.472950 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/kube-rbac-proxy-frr/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.587083 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/reloader/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.672891 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-zp8cf_41e4d93f-6473-4aff-a0b0-e76588bdf2a6/frr-k8s-webhook-server/0.log" Oct 01 07:09:03 crc kubenswrapper[4661]: I1001 07:09:03.883270 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5648555f8f-nqmlz_db1e2c00-b138-4835-a53b-4cb169f585eb/manager/0.log" Oct 01 07:09:04 crc kubenswrapper[4661]: I1001 07:09:04.024147 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-68b4bd4bc7-stfmz_fda1aa7d-8361-476a-b52c-db60416d47c5/webhook-server/0.log" Oct 01 07:09:04 crc kubenswrapper[4661]: I1001 07:09:04.143350 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-x4czl_41216ce7-4a7f-43c7-995e-081c9849f9bf/kube-rbac-proxy/0.log" Oct 01 07:09:04 crc kubenswrapper[4661]: I1001 07:09:04.718221 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-x4czl_41216ce7-4a7f-43c7-995e-081c9849f9bf/speaker/0.log" Oct 01 07:09:05 crc kubenswrapper[4661]: I1001 07:09:05.057835 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/frr/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.281940 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.457968 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.493574 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.513481 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.689013 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.691959 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.696758 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/extract/0.log" Oct 01 07:09:18 crc kubenswrapper[4661]: I1001 07:09:18.874336 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.082621 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.085254 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.087806 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.237066 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.240015 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.467825 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/extract/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.583670 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.746203 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log" Oct 01 07:09:19 crc 
kubenswrapper[4661]: I1001 07:09:19.785576 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log" Oct 01 07:09:19 crc kubenswrapper[4661]: I1001 07:09:19.818529 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.018154 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.023111 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.245419 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.447539 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.473691 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.517814 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.661555 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.679455 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/registry-server/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.710449 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log" Oct 01 07:09:20 crc kubenswrapper[4661]: I1001 07:09:20.916626 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.200201 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.206170 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.221497 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.367674 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/registry-server/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.414566 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.446563 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.447581 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/extract/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.596184 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xpqj9_af543e28-92e4-4c71-a1dc-1478f2c25169/marketplace-operator/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.693199 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.869744 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.875401 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log" Oct 01 07:09:21 crc kubenswrapper[4661]: I1001 07:09:21.916400 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.110578 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.115190 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.206914 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.317109 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/registry-server/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.338644 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log" Oct 01 
07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.380970 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.410887 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.518346 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log" Oct 01 07:09:22 crc kubenswrapper[4661]: I1001 07:09:22.593705 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log" Oct 01 07:09:23 crc kubenswrapper[4661]: I1001 07:09:23.061224 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/registry-server/0.log" Oct 01 07:09:36 crc kubenswrapper[4661]: I1001 07:09:36.312708 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-8qwx2_e4e8d27f-fbab-4c45-b182-73df1cef9061/prometheus-operator/0.log" Oct 01 07:09:36 crc kubenswrapper[4661]: I1001 07:09:36.648734 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5_6f995c9b-5efa-4d54-be17-7d67186446c3/prometheus-operator-admission-webhook/0.log" Oct 01 07:09:36 crc kubenswrapper[4661]: I1001 07:09:36.679404 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf_cb7f3b2f-e219-4709-85e3-8b1df4b288bd/prometheus-operator-admission-webhook/0.log" Oct 01 07:09:36 crc kubenswrapper[4661]: I1001 07:09:36.829957 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-scssh_a7062884-4a27-4396-b3f5-698aceda68d2/operator/0.log" Oct 01 07:09:37 crc kubenswrapper[4661]: I1001 07:09:37.014865 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-pnpg7_58ef1745-3b19-4508-b099-100418c1a6d7/perses-operator/0.log" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.708883 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:38 crc kubenswrapper[4661]: E1001 07:09:38.709843 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" containerName="container-00" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.709859 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" containerName="container-00" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.710033 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bc37c45-23e4-4cb2-b87b-5e880d7a0d67" containerName="container-00" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.711378 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.726259 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.863861 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.863943 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.864066 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbjjw\" (UniqueName: \"kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.965712 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbjjw\" (UniqueName: \"kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.965820 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.965879 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.966395 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.967254 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:38 crc kubenswrapper[4661]: I1001 07:09:38.986237 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rbjjw\" (UniqueName: \"kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw\") pod \"redhat-marketplace-pcf4n\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:39 crc kubenswrapper[4661]: I1001 07:09:39.028565 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:39 crc kubenswrapper[4661]: I1001 07:09:39.577215 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:40 crc kubenswrapper[4661]: I1001 07:09:40.123146 4661 generic.go:334] "Generic (PLEG): container finished" podID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerID="18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d" exitCode=0 Oct 01 07:09:40 crc kubenswrapper[4661]: I1001 07:09:40.123242 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerDied","Data":"18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d"} Oct 01 07:09:40 crc kubenswrapper[4661]: I1001 07:09:40.123556 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerStarted","Data":"ca3383251db4fa1e6be0ee10e2ff1c049e26da75be08abb2d0983f5f305d4c4f"} Oct 01 07:09:41 crc kubenswrapper[4661]: I1001 07:09:41.145440 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerStarted","Data":"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4"} Oct 01 07:09:42 crc kubenswrapper[4661]: I1001 07:09:42.159065 4661 generic.go:334] "Generic (PLEG): container finished" podID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerID="a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4" exitCode=0 Oct 01 07:09:42 crc kubenswrapper[4661]: I1001 07:09:42.159221 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerDied","Data":"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4"} Oct 01 07:09:43 crc kubenswrapper[4661]: I1001 07:09:43.171287 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerStarted","Data":"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8"} Oct 01 07:09:43 crc kubenswrapper[4661]: I1001 07:09:43.189491 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pcf4n" podStartSLOduration=2.532373902 podStartE2EDuration="5.189474416s" podCreationTimestamp="2025-10-01 07:09:38 +0000 UTC" firstStartedPulling="2025-10-01 07:09:40.125472228 +0000 UTC m=+6029.063450842" lastFinishedPulling="2025-10-01 07:09:42.782572742 +0000 UTC m=+6031.720551356" observedRunningTime="2025-10-01 07:09:43.187467751 +0000 UTC m=+6032.125446365" watchObservedRunningTime="2025-10-01 07:09:43.189474416 +0000 UTC m=+6032.127453030" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.493738 4661 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.496388 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.526943 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.584575 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4vp5\" (UniqueName: \"kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.584633 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.584806 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.686863 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4vp5\" (UniqueName: \"kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.686932 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.686982 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.687436 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.687574 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities\") pod \"certified-operators-d7w8b\" (UID: 
\"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.711470 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4vp5\" (UniqueName: \"kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5\") pod \"certified-operators-d7w8b\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:44 crc kubenswrapper[4661]: I1001 07:09:44.817418 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:45 crc kubenswrapper[4661]: I1001 07:09:45.420567 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:46 crc kubenswrapper[4661]: I1001 07:09:46.197596 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7add215-1951-4bb6-933a-4d545e53ba40" containerID="241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b" exitCode=0 Oct 01 07:09:46 crc kubenswrapper[4661]: I1001 07:09:46.197704 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerDied","Data":"241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b"} Oct 01 07:09:46 crc kubenswrapper[4661]: I1001 07:09:46.197901 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerStarted","Data":"506a1c31d767679580a896143b336755b4b1b52249ec9104e1cd8711be6ea040"} Oct 01 07:09:46 crc kubenswrapper[4661]: I1001 07:09:46.199581 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 07:09:47 crc kubenswrapper[4661]: I1001 07:09:47.209606 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerStarted","Data":"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952"} Oct 01 07:09:48 crc kubenswrapper[4661]: I1001 07:09:48.223257 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7add215-1951-4bb6-933a-4d545e53ba40" containerID="b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952" exitCode=0 Oct 01 07:09:48 crc kubenswrapper[4661]: I1001 07:09:48.223347 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerDied","Data":"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952"} Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.028781 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.030477 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.086907 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.236018 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerStarted","Data":"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075"} Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.260703 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d7w8b" podStartSLOduration=2.805816031 podStartE2EDuration="5.260687848s" podCreationTimestamp="2025-10-01 07:09:44 +0000 UTC" firstStartedPulling="2025-10-01 07:09:46.199393783 +0000 UTC m=+6035.137372397" lastFinishedPulling="2025-10-01 07:09:48.65426558 +0000 UTC m=+6037.592244214" observedRunningTime="2025-10-01 07:09:49.252096383 +0000 UTC m=+6038.190074997" watchObservedRunningTime="2025-10-01 07:09:49.260687848 +0000 UTC m=+6038.198666462" Oct 01 07:09:49 crc kubenswrapper[4661]: I1001 07:09:49.291283 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:51 crc kubenswrapper[4661]: I1001 07:09:51.509166 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.261734 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pcf4n" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="registry-server" containerID="cri-o://02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8" gracePeriod=2 Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.783180 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.957192 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content\") pod \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.957704 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities\") pod \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.958440 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities" (OuterVolumeSpecName: "utilities") pod "43eb5d26-a29b-4431-ba5d-b16b0a83492d" (UID: "43eb5d26-a29b-4431-ba5d-b16b0a83492d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.958530 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbjjw\" (UniqueName: \"kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw\") pod \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\" (UID: \"43eb5d26-a29b-4431-ba5d-b16b0a83492d\") " Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.960564 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.975390 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43eb5d26-a29b-4431-ba5d-b16b0a83492d" (UID: "43eb5d26-a29b-4431-ba5d-b16b0a83492d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:09:52 crc kubenswrapper[4661]: I1001 07:09:52.979461 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw" (OuterVolumeSpecName: "kube-api-access-rbjjw") pod "43eb5d26-a29b-4431-ba5d-b16b0a83492d" (UID: "43eb5d26-a29b-4431-ba5d-b16b0a83492d"). InnerVolumeSpecName "kube-api-access-rbjjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.062118 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbjjw\" (UniqueName: \"kubernetes.io/projected/43eb5d26-a29b-4431-ba5d-b16b0a83492d-kube-api-access-rbjjw\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.062248 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43eb5d26-a29b-4431-ba5d-b16b0a83492d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.273255 4661 generic.go:334] "Generic (PLEG): container finished" podID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerID="02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8" exitCode=0 Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.273306 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerDied","Data":"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8"} Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.273343 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcf4n" event={"ID":"43eb5d26-a29b-4431-ba5d-b16b0a83492d","Type":"ContainerDied","Data":"ca3383251db4fa1e6be0ee10e2ff1c049e26da75be08abb2d0983f5f305d4c4f"} Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.273311 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcf4n" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.273367 4661 scope.go:117] "RemoveContainer" containerID="02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.311470 4661 scope.go:117] "RemoveContainer" containerID="a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.344406 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.368336 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcf4n"] Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.373862 4661 scope.go:117] "RemoveContainer" containerID="18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.400768 4661 scope.go:117] "RemoveContainer" containerID="02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8" Oct 01 07:09:53 crc kubenswrapper[4661]: E1001 07:09:53.401386 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8\": container with ID starting with 02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8 not found: ID does not exist" containerID="02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.401444 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8"} err="failed to get container status \"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8\": rpc error: code = NotFound desc = could not find container \"02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8\": container with ID starting with 02d1c5c493c3c6250f908502cb46b292646106b22238f4b40cd51e0687a6a2d8 not found: ID does not exist" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.401478 4661 scope.go:117] "RemoveContainer" containerID="a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4" Oct 01 07:09:53 crc kubenswrapper[4661]: E1001 07:09:53.401903 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4\": container with ID starting with a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4 not found: ID does not exist" containerID="a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.401946 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4"} err="failed to get container status \"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4\": rpc error: code = NotFound desc = could not find container \"a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4\": container with ID starting with a117ec87588212a5478113fd0fa3102bd412ea7d64d4fbc6155e943ea1dde4c4 not found: ID does not exist" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.401973 4661 scope.go:117] "RemoveContainer" 
containerID="18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d" Oct 01 07:09:53 crc kubenswrapper[4661]: E1001 07:09:53.402210 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d\": container with ID starting with 18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d not found: ID does not exist" containerID="18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.402235 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d"} err="failed to get container status \"18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d\": rpc error: code = NotFound desc = could not find container \"18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d\": container with ID starting with 18d8fdddbdbedba949ffeec2fd87a1901f22fb1437fc5bd84693e0f6b6f3183d not found: ID does not exist" Oct 01 07:09:53 crc kubenswrapper[4661]: I1001 07:09:53.767047 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" path="/var/lib/kubelet/pods/43eb5d26-a29b-4431-ba5d-b16b0a83492d/volumes" Oct 01 07:09:54 crc kubenswrapper[4661]: I1001 07:09:54.817778 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:54 crc kubenswrapper[4661]: I1001 07:09:54.818177 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:54 crc kubenswrapper[4661]: I1001 07:09:54.871705 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:55 crc kubenswrapper[4661]: I1001 07:09:55.362026 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:56 crc kubenswrapper[4661]: I1001 07:09:56.483210 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.319733 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d7w8b" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="registry-server" containerID="cri-o://750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075" gracePeriod=2 Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.855773 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.968854 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities\") pod \"b7add215-1951-4bb6-933a-4d545e53ba40\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.968988 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4vp5\" (UniqueName: \"kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5\") pod \"b7add215-1951-4bb6-933a-4d545e53ba40\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.969162 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content\") pod \"b7add215-1951-4bb6-933a-4d545e53ba40\" (UID: \"b7add215-1951-4bb6-933a-4d545e53ba40\") " Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.970466 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities" (OuterVolumeSpecName: "utilities") pod "b7add215-1951-4bb6-933a-4d545e53ba40" (UID: "b7add215-1951-4bb6-933a-4d545e53ba40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:09:57 crc kubenswrapper[4661]: I1001 07:09:57.976803 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5" (OuterVolumeSpecName: "kube-api-access-n4vp5") pod "b7add215-1951-4bb6-933a-4d545e53ba40" (UID: "b7add215-1951-4bb6-933a-4d545e53ba40"). InnerVolumeSpecName "kube-api-access-n4vp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.021297 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7add215-1951-4bb6-933a-4d545e53ba40" (UID: "b7add215-1951-4bb6-933a-4d545e53ba40"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.071268 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.071300 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7add215-1951-4bb6-933a-4d545e53ba40-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.071314 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4vp5\" (UniqueName: \"kubernetes.io/projected/b7add215-1951-4bb6-933a-4d545e53ba40-kube-api-access-n4vp5\") on node \"crc\" DevicePath \"\"" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.331993 4661 generic.go:334] "Generic (PLEG): container finished" podID="b7add215-1951-4bb6-933a-4d545e53ba40" containerID="750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075" exitCode=0 Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.332036 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerDied","Data":"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075"} Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.332063 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d7w8b" event={"ID":"b7add215-1951-4bb6-933a-4d545e53ba40","Type":"ContainerDied","Data":"506a1c31d767679580a896143b336755b4b1b52249ec9104e1cd8711be6ea040"} Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.332082 4661 scope.go:117] "RemoveContainer" containerID="750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.332081 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d7w8b" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.372620 4661 scope.go:117] "RemoveContainer" containerID="b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.423427 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.424290 4661 scope.go:117] "RemoveContainer" containerID="241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.442471 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d7w8b"] Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.469966 4661 scope.go:117] "RemoveContainer" containerID="750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075" Oct 01 07:09:58 crc kubenswrapper[4661]: E1001 07:09:58.471263 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075\": container with ID starting with 750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075 not found: ID does not exist" containerID="750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.471315 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075"} err="failed to get container status \"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075\": rpc error: code = NotFound desc = could not find container \"750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075\": container with ID starting with 750c2a05fa5314230e6ae3c913d18d640e4ccbc032923c775e6b481ddd005075 not found: ID does not exist" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.471347 4661 scope.go:117] "RemoveContainer" containerID="b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952" Oct 01 07:09:58 crc kubenswrapper[4661]: E1001 07:09:58.471760 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952\": container with ID starting with b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952 not found: ID does not exist" containerID="b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.471815 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952"} err="failed to get container status \"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952\": rpc error: code = NotFound desc = could not find container \"b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952\": container with ID starting with b82229f164edc1d416adc8a84eadf12c4a7a5007b25bd4ff487bd0fd443ed952 not found: ID does not exist" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.471841 4661 scope.go:117] "RemoveContainer" containerID="241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b" Oct 01 07:09:58 crc kubenswrapper[4661]: E1001 07:09:58.472219 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b\": container with ID starting with 241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b not found: ID does not exist" containerID="241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b" Oct 01 07:09:58 crc kubenswrapper[4661]: I1001 07:09:58.472289 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b"} err="failed to get container status \"241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b\": rpc error: code = NotFound desc = could not find container \"241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b\": container with ID starting with 241a2f10c5d30814c7fa063bacfa12637b290d68a8c79d65d9ffb5d73335927b not found: ID does not exist" Oct 01 07:09:59 crc kubenswrapper[4661]: I1001 07:09:59.771548 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" path="/var/lib/kubelet/pods/b7add215-1951-4bb6-933a-4d545e53ba40/volumes" Oct 01 07:11:04 crc kubenswrapper[4661]: I1001 07:11:04.309184 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:11:04 crc kubenswrapper[4661]: I1001 07:11:04.309940 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:11:34 crc kubenswrapper[4661]: I1001 07:11:34.309287 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:11:34 crc kubenswrapper[4661]: I1001 07:11:34.309964 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:11:53 crc kubenswrapper[4661]: I1001 07:11:53.702183 4661 generic.go:334] "Generic (PLEG): container finished" podID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerID="a7002fbc9b3dff924b446636e09186459c843c775bad698827b77c2bc4f8aeb6" exitCode=0 Oct 01 07:11:53 crc kubenswrapper[4661]: I1001 07:11:53.702324 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fsd44/must-gather-rtdmc" event={"ID":"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d","Type":"ContainerDied","Data":"a7002fbc9b3dff924b446636e09186459c843c775bad698827b77c2bc4f8aeb6"} Oct 01 07:11:53 crc kubenswrapper[4661]: I1001 07:11:53.704014 4661 scope.go:117] "RemoveContainer" containerID="a7002fbc9b3dff924b446636e09186459c843c775bad698827b77c2bc4f8aeb6" Oct 01 07:11:54 crc kubenswrapper[4661]: I1001 07:11:54.329041 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-fsd44_must-gather-rtdmc_ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d/gather/0.log" Oct 01 07:12:02 crc kubenswrapper[4661]: I1001 07:12:02.687411 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fsd44/must-gather-rtdmc"] Oct 01 07:12:02 crc kubenswrapper[4661]: I1001 07:12:02.688749 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-fsd44/must-gather-rtdmc" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="copy" containerID="cri-o://4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061" gracePeriod=2 Oct 01 07:12:02 crc kubenswrapper[4661]: I1001 07:12:02.699083 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fsd44/must-gather-rtdmc"] Oct 01 07:12:02 crc kubenswrapper[4661]: I1001 07:12:02.824747 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fsd44_must-gather-rtdmc_ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d/copy/0.log" Oct 01 07:12:02 crc kubenswrapper[4661]: I1001 07:12:02.825603 4661 generic.go:334] "Generic (PLEG): container finished" podID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerID="4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061" exitCode=143 Oct 01 07:12:02 crc kubenswrapper[4661]: E1001 07:12:02.873849 4661 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba9b5dfd_f400_4915_bfa0_8d7c6d2e816d.slice/crio-4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba9b5dfd_f400_4915_bfa0_8d7c6d2e816d.slice/crio-conmon-4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061.scope\": RecentStats: unable to find data in memory cache]" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.189676 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fsd44_must-gather-rtdmc_ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d/copy/0.log" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.190254 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.276474 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output\") pod \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.276927 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8qsl\" (UniqueName: \"kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl\") pod \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\" (UID: \"ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d\") " Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.283279 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl" (OuterVolumeSpecName: "kube-api-access-c8qsl") pod "ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" (UID: "ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d"). InnerVolumeSpecName "kube-api-access-c8qsl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.379540 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8qsl\" (UniqueName: \"kubernetes.io/projected/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-kube-api-access-c8qsl\") on node \"crc\" DevicePath \"\"" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.497967 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" (UID: "ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.585227 4661 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.824770 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" path="/var/lib/kubelet/pods/ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d/volumes" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.849338 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fsd44_must-gather-rtdmc_ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d/copy/0.log" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.850155 4661 scope.go:117] "RemoveContainer" containerID="4279895b982ae4300bb59d85e896484ba8763c8f9bf7676f6046fc1931fac061" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.850292 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fsd44/must-gather-rtdmc" Oct 01 07:12:03 crc kubenswrapper[4661]: I1001 07:12:03.905463 4661 scope.go:117] "RemoveContainer" containerID="a7002fbc9b3dff924b446636e09186459c843c775bad698827b77c2bc4f8aeb6" Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.309215 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.309546 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.309588 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.310301 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.310358 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e" gracePeriod=600 Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.869840 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e" exitCode=0 Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.869881 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e"} Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.869939 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"} Oct 01 07:12:04 crc kubenswrapper[4661]: I1001 07:12:04.869959 4661 scope.go:117] "RemoveContainer" containerID="dac5d3eafeb79ec73abf11e53d08df6f5808692fdeb58addebebace35b909743" Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.400152 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pqw4p/must-gather-pkdb2"] Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401459 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="registry-server" Oct 01 07:12:29 crc 
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401511 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="extract-content"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401522 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="extract-content"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401545 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="extract-utilities"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401557 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="extract-utilities"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401571 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="registry-server"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401580 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="registry-server"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401614 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="copy"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401626 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="copy"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401679 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="gather"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401691 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="gather"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401728 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="extract-utilities"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401741 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="extract-utilities"
Oct 01 07:12:29 crc kubenswrapper[4661]: E1001 07:12:29.401756 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="extract-content"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.401767 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="extract-content"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.402064 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="43eb5d26-a29b-4431-ba5d-b16b0a83492d" containerName="registry-server"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.402092 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7add215-1951-4bb6-933a-4d545e53ba40" containerName="registry-server"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.402122 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="copy"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.402144 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba9b5dfd-f400-4915-bfa0-8d7c6d2e816d" containerName="gather"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.404084 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.414075 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pqw4p"/"openshift-service-ca.crt"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.419749 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pqw4p"/"kube-root-ca.crt"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.422038 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pqw4p/must-gather-pkdb2"]
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.577211 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.577357 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhv6q\" (UniqueName: \"kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.679362 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhv6q\" (UniqueName: \"kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.679900 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.680346 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.702759 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhv6q\" (UniqueName: \"kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q\") pod \"must-gather-pkdb2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:29 crc kubenswrapper[4661]: I1001 07:12:29.736042 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/must-gather-pkdb2"
Oct 01 07:12:30 crc kubenswrapper[4661]: I1001 07:12:30.249668 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pqw4p/must-gather-pkdb2"]
Oct 01 07:12:31 crc kubenswrapper[4661]: I1001 07:12:31.183593 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" event={"ID":"61afe435-c553-43ce-876a-b44bb52bc6d2","Type":"ContainerStarted","Data":"892fd50f7a35b6fa587eb5624911465618bcaee23d4cf73ffb2fdd129e1652a9"}
Oct 01 07:12:31 crc kubenswrapper[4661]: I1001 07:12:31.184000 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" event={"ID":"61afe435-c553-43ce-876a-b44bb52bc6d2","Type":"ContainerStarted","Data":"b0722a1eb4e0625f4f3f8cf1145c507df188d76a9e25be46adb09a6db906f73b"}
Oct 01 07:12:31 crc kubenswrapper[4661]: I1001 07:12:31.184018 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" event={"ID":"61afe435-c553-43ce-876a-b44bb52bc6d2","Type":"ContainerStarted","Data":"de85e25e47b405acbc35f4b8df30ac6f5c211617795db80d852d7267bdc3a5e4"}
Oct 01 07:12:31 crc kubenswrapper[4661]: I1001 07:12:31.204056 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" podStartSLOduration=2.204029948 podStartE2EDuration="2.204029948s" podCreationTimestamp="2025-10-01 07:12:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 07:12:31.197959802 +0000 UTC m=+6200.135938416" watchObservedRunningTime="2025-10-01 07:12:31.204029948 +0000 UTC m=+6200.142008592"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.268988 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-xwsm4"]
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.271044 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.274267 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pqw4p"/"default-dockercfg-9njmw"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.371029 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9flg7\" (UniqueName: \"kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.371466 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.473662 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.473831 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.474506 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9flg7\" (UniqueName: \"kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.499186 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9flg7\" (UniqueName: \"kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7\") pod \"crc-debug-xwsm4\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Oct 01 07:12:34 crc kubenswrapper[4661]: I1001 07:12:34.598434 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4"
Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" Oct 01 07:12:34 crc kubenswrapper[4661]: W1001 07:12:34.627682 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad604d70_115a_47d6_b1b3_0a08089d3e86.slice/crio-fdeac182becf1acd16ff1fb5bfcb86a730396b73a3f16c93d5984c9e56ea87db WatchSource:0}: Error finding container fdeac182becf1acd16ff1fb5bfcb86a730396b73a3f16c93d5984c9e56ea87db: Status 404 returned error can't find the container with id fdeac182becf1acd16ff1fb5bfcb86a730396b73a3f16c93d5984c9e56ea87db Oct 01 07:12:35 crc kubenswrapper[4661]: I1001 07:12:35.224187 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" event={"ID":"ad604d70-115a-47d6-b1b3-0a08089d3e86","Type":"ContainerStarted","Data":"5ca8a2542a50e58d2137e78d9ec8d9b33cc02e6b03be3ab881fb1933faad21b6"} Oct 01 07:12:35 crc kubenswrapper[4661]: I1001 07:12:35.224463 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" event={"ID":"ad604d70-115a-47d6-b1b3-0a08089d3e86","Type":"ContainerStarted","Data":"fdeac182becf1acd16ff1fb5bfcb86a730396b73a3f16c93d5984c9e56ea87db"} Oct 01 07:12:35 crc kubenswrapper[4661]: I1001 07:12:35.243778 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" podStartSLOduration=1.243757741 podStartE2EDuration="1.243757741s" podCreationTimestamp="2025-10-01 07:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 07:12:35.239374321 +0000 UTC m=+6204.177352935" watchObservedRunningTime="2025-10-01 07:12:35.243757741 +0000 UTC m=+6204.181736355" Oct 01 07:13:58 crc kubenswrapper[4661]: I1001 07:13:58.906750 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54f94df7b6-mhnj2_70d1ade0-7d6b-4c94-a376-ef7027a47a76/barbican-api/0.log" Oct 01 07:13:58 crc kubenswrapper[4661]: I1001 07:13:58.991180 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54f94df7b6-mhnj2_70d1ade0-7d6b-4c94-a376-ef7027a47a76/barbican-api-log/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.099059 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9964d7f68-mbqp4_e5417b7f-b126-4335-ac74-b5d8f5713aee/barbican-keystone-listener/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.234185 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9964d7f68-mbqp4_e5417b7f-b126-4335-ac74-b5d8f5713aee/barbican-keystone-listener-log/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.321344 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6f9cbcb89-ndbgc_7097203a-fb10-4615-9115-97d10c5b114d/barbican-worker/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.483559 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6f9cbcb89-ndbgc_7097203a-fb10-4615-9115-97d10c5b114d/barbican-worker-log/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.538226 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vwgk6_bbd00e4d-aa89-4800-867e-d8f78c3d2c70/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:13:59 crc 
kubenswrapper[4661]: I1001 07:13:59.825238 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/ceilometer-notification-agent/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.855815 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/ceilometer-central-agent/0.log" Oct 01 07:13:59 crc kubenswrapper[4661]: I1001 07:13:59.996468 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/proxy-httpd/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.004504 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6dda16ec-71bd-4cca-b332-96772962b417/sg-core/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.230972 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5/cinder-api-log/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.470933 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_29f0e9e2-bbd8-4459-bc99-db5d742a37b8/cinder-scheduler/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.585494 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ea71b5c-bc2e-4091-bae3-0fba7bf2efa5/cinder-api/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.649859 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_29f0e9e2-bbd8-4459-bc99-db5d742a37b8/probe/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.766019 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-9248r_c6c35cbf-45e4-4538-b2de-4dbabd413307/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:00 crc kubenswrapper[4661]: I1001 07:14:00.962870 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-qwrp5_d5db5a1f-67ff-49de-af0f-2ddc5b6dc078/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.088928 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rzr5t_cbe5a564-344a-449a-a457-61e5002621a6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.245178 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/init/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.409199 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/init/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.798309 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e0802b8c-f0c0-4210-9618-ed452e52b5a0/glance-httpd/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.802836 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-pvpg8_3f69f735-866d-4ab5-9ef4-f940c2cc2ee5/dnsmasq-dns/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.886498 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7w7pk_a68882a5-f8f7-40a2-8406-409452df5dc5/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:01 crc kubenswrapper[4661]: I1001 07:14:01.958221 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e0802b8c-f0c0-4210-9618-ed452e52b5a0/glance-log/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.058012 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6aa66ead-9f31-4644-a6ba-b3f6ddb82c64/glance-log/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.077253 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6aa66ead-9f31-4644-a6ba-b3f6ddb82c64/glance-httpd/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.315517 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d96d88fb-5fr24_a78c5827-b563-4f29-9a60-6810f67f943a/horizon/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.451602 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-nvtst_3b1a4183-d18a-4f41-b62a-12f52370c46e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.583909 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-p6p8t_a9c45dbf-cdfc-49a6-ac9d-49609a690564/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:02 crc kubenswrapper[4661]: I1001 07:14:02.949020 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321641-pqtk6_bbe73ddd-f327-4f4b-ba2d-d647bae84361/keystone-cron/0.log" Oct 01 07:14:03 crc kubenswrapper[4661]: I1001 07:14:03.078920 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d96d88fb-5fr24_a78c5827-b563-4f29-9a60-6810f67f943a/horizon-log/0.log" Oct 01 07:14:03 crc kubenswrapper[4661]: I1001 07:14:03.154102 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321701-zsg45_850d09d7-51e1-4b5f-a53e-5d39cc38dac2/keystone-cron/0.log" Oct 01 07:14:03 crc kubenswrapper[4661]: I1001 07:14:03.317912 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7677654df9-tdbxq_a2287a17-7b4e-40d0-ba56-0e78abd1b1ec/keystone-api/0.log" Oct 01 07:14:03 crc kubenswrapper[4661]: I1001 07:14:03.440953 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ea68e214-b71a-4f5b-b2ee-9091d484023d/kube-state-metrics/0.log" Oct 01 07:14:03 crc kubenswrapper[4661]: I1001 07:14:03.528114 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-kwhhq_322bbaf3-0120-49be-90f1-04d42199e753/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:04 crc kubenswrapper[4661]: I1001 07:14:04.087957 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-c77567f6f-m5g68_35deb613-6735-4de6-ab11-50138ce73e30/neutron-httpd/0.log" Oct 01 07:14:04 crc kubenswrapper[4661]: I1001 07:14:04.111186 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wlfft_632190ce-99ee-453c-8cdb-103d2f512c6b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:04 crc 
Oct 01 07:14:04 crc kubenswrapper[4661]: I1001 07:14:04.308744 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 07:14:04 crc kubenswrapper[4661]: I1001 07:14:04.308793 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 07:14:05 crc kubenswrapper[4661]: I1001 07:14:05.432439 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_2652d904-fc8d-4fe1-8980-896266d64eec/nova-cell0-conductor-conductor/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.102300 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_12ce0bf4-4fb7-44da-87d4-9592ef8848a1/nova-cell1-conductor-conductor/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.252725 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636/nova-api-log/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.514671 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_09ca520d-ccbe-4bcd-8cc9-36a6ac5cf636/nova-api-api/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.625166 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_81e96b52-f038-466f-92cd-07f4f8574bd5/nova-cell1-novncproxy-novncproxy/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.814412 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-2xtbm_2e4df47f-3fbf-4a44-89d7-fd97b1fef95f/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 07:14:06 crc kubenswrapper[4661]: I1001 07:14:06.920308 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_299202ce-4dac-4387-8684-b94ca8f9f1b3/nova-metadata-log/0.log"
Oct 01 07:14:07 crc kubenswrapper[4661]: I1001 07:14:07.471326 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/mysql-bootstrap/0.log"
Oct 01 07:14:07 crc kubenswrapper[4661]: I1001 07:14:07.497929 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c1001022-d4fa-47f2-804f-480807988029/nova-scheduler-scheduler/0.log"
Oct 01 07:14:07 crc kubenswrapper[4661]: I1001 07:14:07.700700 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/mysql-bootstrap/0.log"
Oct 01 07:14:07 crc kubenswrapper[4661]: I1001 07:14:07.713995 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e892c883-6f23-415f-9e9d-bde45fefe01e/galera/0.log"
Oct 01 07:14:07 crc kubenswrapper[4661]: I1001 07:14:07.929486 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/mysql-bootstrap/0.log"
Oct 01 07:14:08 crc kubenswrapper[4661]: I1001 07:14:08.095080 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/mysql-bootstrap/0.log"
Oct 01 07:14:08 crc kubenswrapper[4661]: I1001 07:14:08.199758 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_daca2202-a971-4201-81be-edef6f0c40f6/galera/0.log"
Oct 01 07:14:08 crc kubenswrapper[4661]: I1001 07:14:08.400170 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c10d9095-6d32-4b4a-8706-d06e0693ddb9/openstackclient/0.log"
Oct 01 07:14:08 crc kubenswrapper[4661]: I1001 07:14:08.605551 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hlpwz_c93ef988-b9e9-4cfc-950b-2b3060b6e4b8/openstack-network-exporter/0.log"
Oct 01 07:14:08 crc kubenswrapper[4661]: I1001 07:14:08.844599 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-mplg4_c5001332-068e-46eb-a21c-25e29832baab/ovn-controller/0.log"
Oct 01 07:14:09 crc kubenswrapper[4661]: I1001 07:14:09.131741 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server-init/0.log"
Oct 01 07:14:09 crc kubenswrapper[4661]: I1001 07:14:09.293290 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server-init/0.log"
Oct 01 07:14:09 crc kubenswrapper[4661]: I1001 07:14:09.451713 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovsdb-server/0.log"
Oct 01 07:14:09 crc kubenswrapper[4661]: I1001 07:14:09.857379 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wmxcl_34fe2d2b-33b8-4736-98c8-3b7ae70118dd/ovs-vswitchd/0.log"
Oct 01 07:14:09 crc kubenswrapper[4661]: I1001 07:14:09.943339 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-v8wft_5dc6881d-aedd-4945-98d8-9993fedd71dd/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.029249 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_299202ce-4dac-4387-8684-b94ca8f9f1b3/nova-metadata-metadata/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.148352 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8586024c-fe6a-4ccd-adc0-2e8e2a1bf823/openstack-network-exporter/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.217118 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8586024c-fe6a-4ccd-adc0-2e8e2a1bf823/ovn-northd/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.343744 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dff60953-8a38-41cb-bc21-6192798508a1/openstack-network-exporter/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.403040 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_2741b07c-1750-4920-a734-2f51af08ac8b/memcached/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.428230 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dff60953-8a38-41cb-bc21-6192798508a1/ovsdbserver-nb/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.574575 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6152c766-cf88-4b8c-9c8a-372dcdd4e62b/openstack-network-exporter/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.585397 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6152c766-cf88-4b8c-9c8a-372dcdd4e62b/ovsdbserver-sb/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.890383 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/init-config-reloader/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.932084 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-558754b4b6-4khhg_59481d9b-6c9f-48ac-93d1-870dbfb6edaf/placement-api/0.log"
Oct 01 07:14:10 crc kubenswrapper[4661]: I1001 07:14:10.991276 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-558754b4b6-4khhg_59481d9b-6c9f-48ac-93d1-870dbfb6edaf/placement-log/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.086000 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/init-config-reloader/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.095853 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/config-reloader/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.149120 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/prometheus/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.159539 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_8df66a7a-5bab-428f-b415-0e46c952f4fb/thanos-sidecar/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.311675 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/setup-container/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.502415 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/setup-container/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.520671 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_066b014c-15b7-49e3-9f01-b758855ca8a2/rabbitmq/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.529999 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/setup-container/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.693288 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/rabbitmq/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.716818 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_1658ccd7-4bae-45bf-aa67-fc5c075a417c/setup-container/0.log"
Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.766689 4661 log.go:25] "Finished parsing log
file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/setup-container/0.log" Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.920198 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/setup-container/0.log" Oct 01 07:14:11 crc kubenswrapper[4661]: I1001 07:14:11.949695 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9a455c2-0405-4416-a367-c34353ee3fa3/rabbitmq/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.027620 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-7kktv_c9202962-2893-458d-996c-2890fa302029/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.126971 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-sjh66_dc423888-0bfc-45b9-ba9f-6bc52e8df43b/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.196017 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xx75k_a0c02866-25ee-4ef1-9bba-572422cabc26/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.346244 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fwf7d_daed5adf-0ad7-4236-abc5-fcd5053645b7/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.455306 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-fzw95_7d367bb3-134b-460e-8cff-75aad6a88043/ssh-known-hosts-edpm-deployment/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.657849 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57dbcb9cbf-64x2k_cb384d97-a4b8-4eba-ac70-0ba6843cec4e/proxy-server/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.735821 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57dbcb9cbf-64x2k_cb384d97-a4b8-4eba-ac70-0ba6843cec4e/proxy-httpd/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.782282 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-t28p6_66f7b863-7d30-41b4-882c-c982fafa148a/swift-ring-rebalance/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.895528 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-auditor/0.log" Oct 01 07:14:12 crc kubenswrapper[4661]: I1001 07:14:12.922988 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-reaper/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.008360 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-replicator/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.090123 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-auditor/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.091597 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/account-server/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.155263 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-replicator/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.207503 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-server/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.275324 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/container-updater/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.300698 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-auditor/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.331933 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-expirer/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.422404 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-replicator/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.457127 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-server/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.483175 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/object-updater/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.519544 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/rsync/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.580814 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e08e6861-2a19-4c40-8ed3-aeb2662d75bd/swift-recon-cron/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.694848 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7pfqs_e2078d83-8d53-4052-8b77-031948bc8705/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.784103 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_ff1ca911-a470-4bfb-8cc2-3f76257eed1f/tempest-tests-tempest-tests-runner/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.860602 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_1c1b7300-2c13-4f7a-92d8-be7ff9e0374c/test-operator-logs-container/0.log" Oct 01 07:14:13 crc kubenswrapper[4661]: I1001 07:14:13.981654 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-4cxwq_5ec720ac-37b8-4dab-9b08-717b48bfae27/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 07:14:14 crc kubenswrapper[4661]: I1001 07:14:14.829713 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_b6ede629-3e6b-448b-be47-77ce371d40f7/watcher-applier/0.log" Oct 01 07:14:15 crc 
kubenswrapper[4661]: I1001 07:14:15.200169 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_0858ffab-0d1d-422b-8ac2-abeef9ab22ed/watcher-api-log/0.log" Oct 01 07:14:17 crc kubenswrapper[4661]: I1001 07:14:17.719410 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_7867e060-f9b9-4ef6-9fb2-2a4d19d7ba1a/watcher-decision-engine/0.log" Oct 01 07:14:18 crc kubenswrapper[4661]: I1001 07:14:18.826269 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_0858ffab-0d1d-422b-8ac2-abeef9ab22ed/watcher-api/0.log" Oct 01 07:14:25 crc kubenswrapper[4661]: I1001 07:14:25.483425 4661 scope.go:117] "RemoveContainer" containerID="9c89ead3089bc0d2b95393b0b74b2035a7a5ceb0b2fed9ebe900400d523ef470" Oct 01 07:14:34 crc kubenswrapper[4661]: I1001 07:14:34.309008 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 07:14:34 crc kubenswrapper[4661]: I1001 07:14:34.309537 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 07:14:39 crc kubenswrapper[4661]: I1001 07:14:39.458975 4661 generic.go:334] "Generic (PLEG): container finished" podID="ad604d70-115a-47d6-b1b3-0a08089d3e86" containerID="5ca8a2542a50e58d2137e78d9ec8d9b33cc02e6b03be3ab881fb1933faad21b6" exitCode=0 Oct 01 07:14:39 crc kubenswrapper[4661]: I1001 07:14:39.459057 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" event={"ID":"ad604d70-115a-47d6-b1b3-0a08089d3e86","Type":"ContainerDied","Data":"5ca8a2542a50e58d2137e78d9ec8d9b33cc02e6b03be3ab881fb1933faad21b6"} Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.580320 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.606840 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-xwsm4"] Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.621924 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-xwsm4"] Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.654986 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9flg7\" (UniqueName: \"kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7\") pod \"ad604d70-115a-47d6-b1b3-0a08089d3e86\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.655280 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host\") pod \"ad604d70-115a-47d6-b1b3-0a08089d3e86\" (UID: \"ad604d70-115a-47d6-b1b3-0a08089d3e86\") " Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.655459 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host" (OuterVolumeSpecName: "host") pod "ad604d70-115a-47d6-b1b3-0a08089d3e86" (UID: "ad604d70-115a-47d6-b1b3-0a08089d3e86"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.655979 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad604d70-115a-47d6-b1b3-0a08089d3e86-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.664671 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7" (OuterVolumeSpecName: "kube-api-access-9flg7") pod "ad604d70-115a-47d6-b1b3-0a08089d3e86" (UID: "ad604d70-115a-47d6-b1b3-0a08089d3e86"). InnerVolumeSpecName "kube-api-access-9flg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:14:40 crc kubenswrapper[4661]: I1001 07:14:40.757748 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9flg7\" (UniqueName: \"kubernetes.io/projected/ad604d70-115a-47d6-b1b3-0a08089d3e86-kube-api-access-9flg7\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.487513 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdeac182becf1acd16ff1fb5bfcb86a730396b73a3f16c93d5984c9e56ea87db" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.487574 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-xwsm4" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.775391 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad604d70-115a-47d6-b1b3-0a08089d3e86" path="/var/lib/kubelet/pods/ad604d70-115a-47d6-b1b3-0a08089d3e86/volumes" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.820272 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-m9kkk"] Oct 01 07:14:41 crc kubenswrapper[4661]: E1001 07:14:41.820651 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad604d70-115a-47d6-b1b3-0a08089d3e86" containerName="container-00" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.820665 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad604d70-115a-47d6-b1b3-0a08089d3e86" containerName="container-00" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.820871 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad604d70-115a-47d6-b1b3-0a08089d3e86" containerName="container-00" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.821462 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.823943 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pqw4p"/"default-dockercfg-9njmw" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.879518 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.880711 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbkqd\" (UniqueName: \"kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.982290 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbkqd\" (UniqueName: \"kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.982354 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:41 crc kubenswrapper[4661]: I1001 07:14:41.982474 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:42 crc kubenswrapper[4661]: I1001 07:14:42.001236 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbkqd\" (UniqueName: 
\"kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd\") pod \"crc-debug-m9kkk\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:42 crc kubenswrapper[4661]: I1001 07:14:42.139494 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:42 crc kubenswrapper[4661]: I1001 07:14:42.500487 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" event={"ID":"561a8e69-c79b-494a-98fe-12982136fb96","Type":"ContainerStarted","Data":"f79fb10e7cff686f8ece022cfad89d8a54824883ff74e6536165cb16dc4cd1b5"} Oct 01 07:14:42 crc kubenswrapper[4661]: I1001 07:14:42.501071 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" event={"ID":"561a8e69-c79b-494a-98fe-12982136fb96","Type":"ContainerStarted","Data":"d530628bff2814ef7bef89d6ab48f5a9017a6ac100310921d537a56b489406d8"} Oct 01 07:14:42 crc kubenswrapper[4661]: I1001 07:14:42.519082 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" podStartSLOduration=1.5190600619999999 podStartE2EDuration="1.519060062s" podCreationTimestamp="2025-10-01 07:14:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 07:14:42.518118247 +0000 UTC m=+6331.456096871" watchObservedRunningTime="2025-10-01 07:14:42.519060062 +0000 UTC m=+6331.457038666" Oct 01 07:14:43 crc kubenswrapper[4661]: I1001 07:14:43.511118 4661 generic.go:334] "Generic (PLEG): container finished" podID="561a8e69-c79b-494a-98fe-12982136fb96" containerID="f79fb10e7cff686f8ece022cfad89d8a54824883ff74e6536165cb16dc4cd1b5" exitCode=0 Oct 01 07:14:43 crc kubenswrapper[4661]: I1001 07:14:43.511440 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" event={"ID":"561a8e69-c79b-494a-98fe-12982136fb96","Type":"ContainerDied","Data":"f79fb10e7cff686f8ece022cfad89d8a54824883ff74e6536165cb16dc4cd1b5"} Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.621970 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.730086 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host\") pod \"561a8e69-c79b-494a-98fe-12982136fb96\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.730158 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host" (OuterVolumeSpecName: "host") pod "561a8e69-c79b-494a-98fe-12982136fb96" (UID: "561a8e69-c79b-494a-98fe-12982136fb96"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.730305 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbkqd\" (UniqueName: \"kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd\") pod \"561a8e69-c79b-494a-98fe-12982136fb96\" (UID: \"561a8e69-c79b-494a-98fe-12982136fb96\") " Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.731411 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/561a8e69-c79b-494a-98fe-12982136fb96-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.735753 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd" (OuterVolumeSpecName: "kube-api-access-mbkqd") pod "561a8e69-c79b-494a-98fe-12982136fb96" (UID: "561a8e69-c79b-494a-98fe-12982136fb96"). InnerVolumeSpecName "kube-api-access-mbkqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:14:44 crc kubenswrapper[4661]: I1001 07:14:44.832872 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbkqd\" (UniqueName: \"kubernetes.io/projected/561a8e69-c79b-494a-98fe-12982136fb96-kube-api-access-mbkqd\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:45 crc kubenswrapper[4661]: I1001 07:14:45.530525 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" event={"ID":"561a8e69-c79b-494a-98fe-12982136fb96","Type":"ContainerDied","Data":"d530628bff2814ef7bef89d6ab48f5a9017a6ac100310921d537a56b489406d8"} Oct 01 07:14:45 crc kubenswrapper[4661]: I1001 07:14:45.530563 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d530628bff2814ef7bef89d6ab48f5a9017a6ac100310921d537a56b489406d8" Oct 01 07:14:45 crc kubenswrapper[4661]: I1001 07:14:45.530604 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-m9kkk" Oct 01 07:14:52 crc kubenswrapper[4661]: I1001 07:14:52.049610 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-m9kkk"] Oct 01 07:14:52 crc kubenswrapper[4661]: I1001 07:14:52.060176 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-m9kkk"] Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.242274 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-zj665"] Oct 01 07:14:53 crc kubenswrapper[4661]: E1001 07:14:53.242721 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="561a8e69-c79b-494a-98fe-12982136fb96" containerName="container-00" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.242734 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="561a8e69-c79b-494a-98fe-12982136fb96" containerName="container-00" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.242949 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="561a8e69-c79b-494a-98fe-12982136fb96" containerName="container-00" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.243643 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.245126 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pqw4p"/"default-dockercfg-9njmw" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.286421 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntr2\" (UniqueName: \"kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.286503 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.388284 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntr2\" (UniqueName: \"kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.388386 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.388560 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.431073 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tntr2\" (UniqueName: \"kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2\") pod \"crc-debug-zj665\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.569095 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:53 crc kubenswrapper[4661]: I1001 07:14:53.778185 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="561a8e69-c79b-494a-98fe-12982136fb96" path="/var/lib/kubelet/pods/561a8e69-c79b-494a-98fe-12982136fb96/volumes" Oct 01 07:14:54 crc kubenswrapper[4661]: I1001 07:14:54.622418 4661 generic.go:334] "Generic (PLEG): container finished" podID="b365a42d-0abd-4018-bffe-700c99c1e08c" containerID="6643243e7ce9c5a1281e8408d217440141275ff9458a6e1c9e84654e6818a382" exitCode=0 Oct 01 07:14:54 crc kubenswrapper[4661]: I1001 07:14:54.622763 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-zj665" event={"ID":"b365a42d-0abd-4018-bffe-700c99c1e08c","Type":"ContainerDied","Data":"6643243e7ce9c5a1281e8408d217440141275ff9458a6e1c9e84654e6818a382"} Oct 01 07:14:54 crc kubenswrapper[4661]: I1001 07:14:54.622916 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/crc-debug-zj665" event={"ID":"b365a42d-0abd-4018-bffe-700c99c1e08c","Type":"ContainerStarted","Data":"ea2609e7be90ae544a5274a407ee8d65e987a98e0815fbdd60ac9697c081bfce"} Oct 01 07:14:54 crc kubenswrapper[4661]: I1001 07:14:54.680018 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-zj665"] Oct 01 07:14:54 crc kubenswrapper[4661]: I1001 07:14:54.702229 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pqw4p/crc-debug-zj665"] Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.755258 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.839273 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host\") pod \"b365a42d-0abd-4018-bffe-700c99c1e08c\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.839557 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tntr2\" (UniqueName: \"kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2\") pod \"b365a42d-0abd-4018-bffe-700c99c1e08c\" (UID: \"b365a42d-0abd-4018-bffe-700c99c1e08c\") " Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.841035 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host" (OuterVolumeSpecName: "host") pod "b365a42d-0abd-4018-bffe-700c99c1e08c" (UID: "b365a42d-0abd-4018-bffe-700c99c1e08c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.847148 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2" (OuterVolumeSpecName: "kube-api-access-tntr2") pod "b365a42d-0abd-4018-bffe-700c99c1e08c" (UID: "b365a42d-0abd-4018-bffe-700c99c1e08c"). InnerVolumeSpecName "kube-api-access-tntr2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.941615 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tntr2\" (UniqueName: \"kubernetes.io/projected/b365a42d-0abd-4018-bffe-700c99c1e08c-kube-api-access-tntr2\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:55 crc kubenswrapper[4661]: I1001 07:14:55.941858 4661 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b365a42d-0abd-4018-bffe-700c99c1e08c-host\") on node \"crc\" DevicePath \"\"" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.559420 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-p6lbb_1e3e3612-5d8a-4db4-af00-94428fcb570e/kube-rbac-proxy/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.586933 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-p6lbb_1e3e3612-5d8a-4db4-af00-94428fcb570e/manager/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.639706 4661 scope.go:117] "RemoveContainer" containerID="6643243e7ce9c5a1281e8408d217440141275ff9458a6e1c9e84654e6818a382" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.639851 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/crc-debug-zj665" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.710828 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-qslns_a23b3c29-b18c-4ea0-8723-41000d6a754b/kube-rbac-proxy/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.810911 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-qslns_a23b3c29-b18c-4ea0-8723-41000d6a754b/manager/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.903642 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-jszh9_dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2/kube-rbac-proxy/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.930650 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-jszh9_dfdbd600-7bd1-43fa-a4cf-44f66f79e3e2/manager/0.log" Oct 01 07:14:56 crc kubenswrapper[4661]: I1001 07:14:56.988617 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.173543 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.184706 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.187331 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:14:57 
crc kubenswrapper[4661]: I1001 07:14:57.338434 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/util/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.366917 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/pull/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.377623 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_edfb8f1e3757614357120847610749d1bf40332c7e495947e8dcac02c16qxcd_e97f5f22-3f8d-416c-a584-6d49ccb28f70/extract/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.500960 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-fmrmk_29b41883-13c4-454f-a3d3-45aa0db29f82/kube-rbac-proxy/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.588884 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-fmrmk_29b41883-13c4-454f-a3d3-45aa0db29f82/manager/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.589084 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8fsv8_12785d9c-9cdb-4c80-bc4b-ee398e655992/kube-rbac-proxy/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.686842 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8fsv8_12785d9c-9cdb-4c80-bc4b-ee398e655992/manager/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.744663 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-7w27g_001f8e02-2d35-4c68-88ae-4d732588213c/kube-rbac-proxy/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.767143 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b365a42d-0abd-4018-bffe-700c99c1e08c" path="/var/lib/kubelet/pods/b365a42d-0abd-4018-bffe-700c99c1e08c/volumes" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.804889 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-7w27g_001f8e02-2d35-4c68-88ae-4d732588213c/manager/0.log" Oct 01 07:14:57 crc kubenswrapper[4661]: I1001 07:14:57.938186 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-9msgq_0e131827-21a2-4464-80d3-7528c1d8c52a/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.044029 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-7fjjd_50f20957-2408-4a65-a326-e3b76051b38b/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.112042 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-9msgq_0e131827-21a2-4464-80d3-7528c1d8c52a/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.184666 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-7fjjd_50f20957-2408-4a65-a326-e3b76051b38b/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.233228 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-6l9mf_c0601966-5144-438f-a862-3f397e7064a4/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.345228 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-6l9mf_c0601966-5144-438f-a862-3f397e7064a4/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.425540 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-dxp6r_f020bcbc-c80b-4465-9733-204a86325234/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.456857 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-dxp6r_f020bcbc-c80b-4465-9733-204a86325234/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.617778 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-nwsxg_9d7506fd-1133-4927-872d-c68c525cba62/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.644738 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-nwsxg_9d7506fd-1133-4927-872d-c68c525cba62/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.798449 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8kwhx_2e8d1048-762f-4737-82c2-c6244072bf9d/kube-rbac-proxy/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.832479 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8kwhx_2e8d1048-762f-4737-82c2-c6244072bf9d/manager/0.log" Oct 01 07:14:58 crc kubenswrapper[4661]: I1001 07:14:58.873455 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-68scz_34d73c0f-65b8-4718-8409-849ac307168f/kube-rbac-proxy/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.033962 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-f248r_5fbac0a5-84db-4228-9c2c-93d8d551044d/kube-rbac-proxy/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.071606 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-68scz_34d73c0f-65b8-4718-8409-849ac307168f/manager/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.095935 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-f248r_5fbac0a5-84db-4228-9c2c-93d8d551044d/manager/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.234202 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k_8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb/kube-rbac-proxy/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.234369 4661 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-cc9cf7895-cqf7k_8b8c62a5-e7a4-4985-b2e6-f0e817c69dfb/manager/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.393374 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8648d97544-tlsf9_68f9b331-6beb-4cda-884c-326180cb52c8/kube-rbac-proxy/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.486873 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8d79f897-dcwff_12436698-76a2-4877-8d43-8af3c769ec32/kube-rbac-proxy/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.728326 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8d79f897-dcwff_12436698-76a2-4877-8d43-8af3c769ec32/operator/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.785751 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cvddl_2dc07b8e-5f1a-46cc-a33d-ffc63239d05a/registry-server/0.log" Oct 01 07:14:59 crc kubenswrapper[4661]: I1001 07:14:59.901125 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-lhhfj_01c6b4d6-55f6-4837-af54-9eb764262d03/kube-rbac-proxy/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.035724 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-xs8zb_3ad43452-d673-42a7-8495-887b5e93cacb/kube-rbac-proxy/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.064924 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-lhhfj_01c6b4d6-55f6-4837-af54-9eb764262d03/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.141146 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-xs8zb_3ad43452-d673-42a7-8495-887b5e93cacb/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.160261 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t"] Oct 01 07:15:00 crc kubenswrapper[4661]: E1001 07:15:00.160850 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b365a42d-0abd-4018-bffe-700c99c1e08c" containerName="container-00" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.160876 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="b365a42d-0abd-4018-bffe-700c99c1e08c" containerName="container-00" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.161132 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="b365a42d-0abd-4018-bffe-700c99c1e08c" containerName="container-00" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.162095 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.163726 4661 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.163932 4661 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.176984 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t"] Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.218159 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.218421 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.218489 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fld5q\" (UniqueName: \"kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.311970 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-5n77c_ff6dc8a8-7a0a-47db-9aec-dbc1f8236beb/operator/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.319657 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fld5q\" (UniqueName: \"kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.319763 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.319853 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 
01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.320557 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.326822 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.345585 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fld5q\" (UniqueName: \"kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q\") pod \"collect-profiles-29321715-fr94t\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.363506 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-cv29g_a9e55ad4-d1a5-4830-96db-02c95384650c/kube-rbac-proxy/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.492406 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.551119 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-cd2rk_5826e57e-36e6-43e0-8141-7e6e3ae936a6/kube-rbac-proxy/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.552028 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-cv29g_a9e55ad4-d1a5-4830-96db-02c95384650c/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.731922 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8648d97544-tlsf9_68f9b331-6beb-4cda-884c-326180cb52c8/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.767861 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-jsj4d_838837a9-4076-41ba-91e4-44055ce7c97a/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.845986 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-jsj4d_838837a9-4076-41ba-91e4-44055ce7c97a/kube-rbac-proxy/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.906138 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-cd2rk_5826e57e-36e6-43e0-8141-7e6e3ae936a6/manager/0.log" Oct 01 07:15:00 crc kubenswrapper[4661]: I1001 07:15:00.986229 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-2hjs8_1789dd27-2b5e-46e6-9260-affd4daf86cb/kube-rbac-proxy/0.log" Oct 01 07:15:01 crc kubenswrapper[4661]: I1001 
07:15:01.060111 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t"]
Oct 01 07:15:01 crc kubenswrapper[4661]: I1001 07:15:01.094791 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-2hjs8_1789dd27-2b5e-46e6-9260-affd4daf86cb/manager/0.log"
Oct 01 07:15:01 crc kubenswrapper[4661]: I1001 07:15:01.690990 4661 generic.go:334] "Generic (PLEG): container finished" podID="450d5ec8-e8cb-4357-8e47-e330b66162ea" containerID="adfbcd48e778d2847af0256923055b2f0ba8efca32515d8d7df1959861b9189a" exitCode=0
Oct 01 07:15:01 crc kubenswrapper[4661]: I1001 07:15:01.691073 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" event={"ID":"450d5ec8-e8cb-4357-8e47-e330b66162ea","Type":"ContainerDied","Data":"adfbcd48e778d2847af0256923055b2f0ba8efca32515d8d7df1959861b9189a"}
Oct 01 07:15:01 crc kubenswrapper[4661]: I1001 07:15:01.691526 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" event={"ID":"450d5ec8-e8cb-4357-8e47-e330b66162ea","Type":"ContainerStarted","Data":"63cd671e550cbaab636a73dadece30672503b418ce43001c4a1a1551cac55d0f"}
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.097223 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t"
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.175882 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fld5q\" (UniqueName: \"kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q\") pod \"450d5ec8-e8cb-4357-8e47-e330b66162ea\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") "
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.176064 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume\") pod \"450d5ec8-e8cb-4357-8e47-e330b66162ea\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") "
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.176088 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume\") pod \"450d5ec8-e8cb-4357-8e47-e330b66162ea\" (UID: \"450d5ec8-e8cb-4357-8e47-e330b66162ea\") "
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.176880 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume" (OuterVolumeSpecName: "config-volume") pod "450d5ec8-e8cb-4357-8e47-e330b66162ea" (UID: "450d5ec8-e8cb-4357-8e47-e330b66162ea"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.184034 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q" (OuterVolumeSpecName: "kube-api-access-fld5q") pod "450d5ec8-e8cb-4357-8e47-e330b66162ea" (UID: "450d5ec8-e8cb-4357-8e47-e330b66162ea"). InnerVolumeSpecName "kube-api-access-fld5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.184076 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "450d5ec8-e8cb-4357-8e47-e330b66162ea" (UID: "450d5ec8-e8cb-4357-8e47-e330b66162ea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.278112 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fld5q\" (UniqueName: \"kubernetes.io/projected/450d5ec8-e8cb-4357-8e47-e330b66162ea-kube-api-access-fld5q\") on node \"crc\" DevicePath \"\""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.278147 4661 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/450d5ec8-e8cb-4357-8e47-e330b66162ea-config-volume\") on node \"crc\" DevicePath \"\""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.278157 4661 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/450d5ec8-e8cb-4357-8e47-e330b66162ea-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.710817 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t" event={"ID":"450d5ec8-e8cb-4357-8e47-e330b66162ea","Type":"ContainerDied","Data":"63cd671e550cbaab636a73dadece30672503b418ce43001c4a1a1551cac55d0f"}
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.710861 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63cd671e550cbaab636a73dadece30672503b418ce43001c4a1a1551cac55d0f"
Oct 01 07:15:03 crc kubenswrapper[4661]: I1001 07:15:03.710895 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321715-fr94t"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.162549 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw"]
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.172125 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-vllbw"]
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.309505 4661 patch_prober.go:28] interesting pod/machine-config-daemon-wp2wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.309580 4661 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.309650 4661 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.310447 4661 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"} pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.310511 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" containerName="machine-config-daemon" containerID="cri-o://57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" gracePeriod=600
Oct 01 07:15:04 crc kubenswrapper[4661]: E1001 07:15:04.434535 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.722596 4661 generic.go:334] "Generic (PLEG): container finished" podID="7584c4bc-4202-487e-a2b4-4319f428a792" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" exitCode=0
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.722678 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerDied","Data":"57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"}
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.723047 4661 scope.go:117] "RemoveContainer" containerID="28526f72b1fe65b0c2fcdfe6790ed07574eed5d583552550ccd6635fb178a38e"
Oct 01 07:15:04 crc kubenswrapper[4661]: I1001 07:15:04.723845 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:15:04 crc kubenswrapper[4661]: E1001 07:15:04.724166 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:05 crc kubenswrapper[4661]: I1001 07:15:05.766364 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc3c5e7f-a131-4f27-8c62-57083890017a" path="/var/lib/kubelet/pods/bc3c5e7f-a131-4f27-8c62-57083890017a/volumes"
Oct 01 07:15:16 crc kubenswrapper[4661]: I1001 07:15:16.757299 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:15:16 crc kubenswrapper[4661]: E1001 07:15:16.758015 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:17 crc kubenswrapper[4661]: I1001 07:15:17.301604 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kqlm6_61ab52df-4ef3-4f24-a8f3-01c7fef8c99f/control-plane-machine-set-operator/0.log"
Oct 01 07:15:17 crc kubenswrapper[4661]: I1001 07:15:17.434668 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-sppsj_0c21d97e-1221-464d-ae54-56ea6e626e00/machine-api-operator/0.log"
Oct 01 07:15:17 crc kubenswrapper[4661]: I1001 07:15:17.434968 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-sppsj_0c21d97e-1221-464d-ae54-56ea6e626e00/kube-rbac-proxy/0.log"
Oct 01 07:15:25 crc kubenswrapper[4661]: I1001 07:15:25.542412 4661 scope.go:117] "RemoveContainer" containerID="11af60f107021aa9135b7cbe5117d6777ef7cedbf28bde0bb12107b47cd0a000"
Oct 01 07:15:29 crc kubenswrapper[4661]: I1001 07:15:29.634264 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-jpkgm_c676e4ec-6d7a-48a8-a54e-cb33046615f3/cert-manager-controller/0.log"
Oct 01 07:15:29 crc kubenswrapper[4661]: I1001 07:15:29.757695 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:15:29 crc kubenswrapper[4661]: E1001 07:15:29.758046 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:29 crc kubenswrapper[4661]: I1001 07:15:29.769844 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-qx25f_c4de8059-947f-4d97-ad30-a2a3e1081b19/cert-manager-cainjector/0.log"
Oct 01 07:15:29 crc kubenswrapper[4661]: I1001 07:15:29.824392 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4tqmh_8206bae1-8eaa-4f6b-9531-9c200316c97c/cert-manager-webhook/0.log"
Oct 01 07:15:40 crc kubenswrapper[4661]: I1001 07:15:40.757147 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:15:40 crc kubenswrapper[4661]: E1001 07:15:40.758201 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:42 crc kubenswrapper[4661]: I1001 07:15:42.959757 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-xpx7n_db1944d4-6048-4e0c-86fc-3f37d4a653bc/nmstate-console-plugin/0.log"
Oct 01 07:15:43 crc kubenswrapper[4661]: I1001 07:15:43.312282 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jp5md_49f9f7ff-ea7a-42c4-ad95-e8a05841ab36/nmstate-handler/0.log"
Oct 01 07:15:43 crc kubenswrapper[4661]: I1001 07:15:43.380963 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-9nbcr_06d34118-bb4a-4b6f-9637-2fdac6465088/kube-rbac-proxy/0.log"
Oct 01 07:15:43 crc kubenswrapper[4661]: I1001 07:15:43.427031 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-9nbcr_06d34118-bb4a-4b6f-9637-2fdac6465088/nmstate-metrics/0.log"
Oct 01 07:15:43 crc kubenswrapper[4661]: I1001 07:15:43.563374 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-6gctp_185fc7e0-5985-4401-9b50-60a661708075/nmstate-operator/0.log"
Oct 01 07:15:43 crc kubenswrapper[4661]: I1001 07:15:43.652493 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-qkzcj_b4aa2a05-edba-4a48-a854-8c05535af455/nmstate-webhook/0.log"
Oct 01 07:15:53 crc kubenswrapper[4661]: I1001 07:15:53.757269 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:15:53 crc kubenswrapper[4661]: E1001 07:15:53.757976 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.177009 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xfbgw_27288f70-f17e-4362-b115-c0c69e26aa91/kube-rbac-proxy/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.293002 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xfbgw_27288f70-f17e-4362-b115-c0c69e26aa91/controller/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.383525 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.534572 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.552690 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.564306 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.585434 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.740492 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.746043 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.748026 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.783020 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.948340 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-metrics/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.951037 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/controller/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.962902 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-frr-files/0.log"
Oct 01 07:15:59 crc kubenswrapper[4661]: I1001 07:15:59.982593 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/cp-reloader/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.130575 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/frr-metrics/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.153363 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/kube-rbac-proxy/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.202443 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/kube-rbac-proxy-frr/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.369094 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/reloader/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.435215 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-zp8cf_41e4d93f-6473-4aff-a0b0-e76588bdf2a6/frr-k8s-webhook-server/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.612868 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5648555f8f-nqmlz_db1e2c00-b138-4835-a53b-4cb169f585eb/manager/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.839727 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-68b4bd4bc7-stfmz_fda1aa7d-8361-476a-b52c-db60416d47c5/webhook-server/0.log"
Oct 01 07:16:00 crc kubenswrapper[4661]: I1001 07:16:00.864426 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-x4czl_41216ce7-4a7f-43c7-995e-081c9849f9bf/kube-rbac-proxy/0.log"
Oct 01 07:16:01 crc kubenswrapper[4661]: I1001 07:16:01.528364 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-x4czl_41216ce7-4a7f-43c7-995e-081c9849f9bf/speaker/0.log"
Oct 01 07:16:01 crc kubenswrapper[4661]: I1001 07:16:01.921223 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jk5qx_6872061b-610d-47c4-bcea-d3a9b9e507f4/frr/0.log"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.367033 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:02 crc kubenswrapper[4661]: E1001 07:16:02.367417 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450d5ec8-e8cb-4357-8e47-e330b66162ea" containerName="collect-profiles"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.367433 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="450d5ec8-e8cb-4357-8e47-e330b66162ea" containerName="collect-profiles"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.367622 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="450d5ec8-e8cb-4357-8e47-e330b66162ea" containerName="collect-profiles"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.368921 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.384309 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.500325 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.500502 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqvkr\" (UniqueName: \"kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.500552 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.602429 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqvkr\" (UniqueName: \"kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.602501 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.602548 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.603186 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.603208 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.628465 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqvkr\" (UniqueName: \"kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr\") pod \"redhat-operators-rt62v\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") " pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:02 crc kubenswrapper[4661]: I1001 07:16:02.694571 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:03 crc kubenswrapper[4661]: I1001 07:16:03.207153 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:03 crc kubenswrapper[4661]: I1001 07:16:03.350295 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerStarted","Data":"be15eb21dd5fcd53c647edd251a6a652ddf7d3e6dc7f1d22681b954e3fa20b8d"}
Oct 01 07:16:04 crc kubenswrapper[4661]: I1001 07:16:04.362497 4661 generic.go:334] "Generic (PLEG): container finished" podID="417dab25-339f-4d8f-a938-437a607fe627" containerID="8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944" exitCode=0
Oct 01 07:16:04 crc kubenswrapper[4661]: I1001 07:16:04.362586 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerDied","Data":"8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944"}
Oct 01 07:16:04 crc kubenswrapper[4661]: I1001 07:16:04.364924 4661 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 01 07:16:06 crc kubenswrapper[4661]: I1001 07:16:06.386138 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerStarted","Data":"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"}
Oct 01 07:16:06 crc kubenswrapper[4661]: I1001 07:16:06.756969 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:16:06 crc kubenswrapper[4661]: E1001 07:16:06.757531 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:16:08 crc kubenswrapper[4661]: I1001 07:16:08.408625 4661 generic.go:334] "Generic (PLEG): container finished" podID="417dab25-339f-4d8f-a938-437a607fe627" containerID="11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58" exitCode=0
Oct 01 07:16:08 crc kubenswrapper[4661]: I1001 07:16:08.408679 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerDied","Data":"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"}
Oct 01 07:16:09 crc kubenswrapper[4661]: I1001 07:16:09.422307 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerStarted","Data":"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"}
Oct 01 07:16:09 crc kubenswrapper[4661]: I1001 07:16:09.453841 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rt62v" podStartSLOduration=2.939879491 podStartE2EDuration="7.453818602s" podCreationTimestamp="2025-10-01 07:16:02 +0000 UTC" firstStartedPulling="2025-10-01 07:16:04.364610055 +0000 UTC m=+6413.302588669" lastFinishedPulling="2025-10-01 07:16:08.878549156 +0000 UTC m=+6417.816527780" observedRunningTime="2025-10-01 07:16:09.444894968 +0000 UTC m=+6418.382873592" watchObservedRunningTime="2025-10-01 07:16:09.453818602 +0000 UTC m=+6418.391797216"
Oct 01 07:16:12 crc kubenswrapper[4661]: I1001 07:16:12.695086 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:12 crc kubenswrapper[4661]: I1001 07:16:12.695800 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:13 crc kubenswrapper[4661]: I1001 07:16:13.768279 4661 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rt62v" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="registry-server" probeResult="failure" output=<
Oct 01 07:16:13 crc kubenswrapper[4661]: timeout: failed to connect service ":50051" within 1s
Oct 01 07:16:13 crc kubenswrapper[4661]: >
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.265209 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.379368 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.390387 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.441671 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.599099 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/extract/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.641460 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/util/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.652729 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bczprwk_4a49ff2d-3e51-4e56-9062-58c965ab69a3/pull/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.750113 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log"
Oct 01 07:16:15 crc kubenswrapper[4661]: I1001 07:16:15.994440 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.257130 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.339075 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.407214 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/pull/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.424216 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/extract/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.428550 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dzckdk_a5baaec9-0520-4cc9-9056-f4729497a734/util/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.592881 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.729019 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.743715 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.746504 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.908363 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-content/0.log"
Oct 01 07:16:16 crc kubenswrapper[4661]: I1001 07:16:16.926597 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/extract-utilities/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.074545 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.265099 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.322068 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.349817 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.566785 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-content/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.617495 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/extract-utilities/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.655471 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwgw_de60a77e-3fb7-4777-94de-54b40db66c7a/registry-server/0.log"
Oct 01 07:16:17 crc kubenswrapper[4661]: I1001 07:16:17.775747 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.015685 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.019735 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.084973 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.282609 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/pull/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.318934 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/util/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.345533 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96zgnqr_d7850164-3b1e-4bd1-b8c8-691c54963d36/extract/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.446423 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-lqlkm_918474ca-d1fb-44a1-a07f-96c072ded353/registry-server/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.530040 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xpqj9_af543e28-92e4-4c71-a1dc-1478f2c25169/marketplace-operator/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.585981 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.740324 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.740435 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.760785 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log"
Oct 01 07:16:18 crc kubenswrapper[4661]: I1001 07:16:18.963781 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.003217 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.029463 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.172385 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tc8mt_1ec4e027-d0b0-4931-a551-2bfbd8769337/registry-server/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.218679 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.235171 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.258419 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.360500 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.368977 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.520598 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.649194 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.691933 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.695913 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.756538 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:16:19 crc kubenswrapper[4661]: E1001 07:16:19.756863 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.843762 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-utilities/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.886242 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/extract-content/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.939162 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rt62v_417dab25-339f-4d8f-a938-437a607fe627/registry-server/0.log"
Oct 01 07:16:19 crc kubenswrapper[4661]: I1001 07:16:19.976615 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-l56gs_2dccb83e-7a19-4707-b2cc-8c5f68ebc261/registry-server/0.log"
Oct 01 07:16:22 crc kubenswrapper[4661]: I1001 07:16:22.770484 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:22 crc kubenswrapper[4661]: I1001 07:16:22.851773 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:23 crc kubenswrapper[4661]: I1001 07:16:23.040369 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:24 crc kubenswrapper[4661]: I1001 07:16:24.588423 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rt62v" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="registry-server" containerID="cri-o://94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36" gracePeriod=2
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.065530 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.175627 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqvkr\" (UniqueName: \"kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr\") pod \"417dab25-339f-4d8f-a938-437a607fe627\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") "
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.175777 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities\") pod \"417dab25-339f-4d8f-a938-437a607fe627\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") "
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.175889 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content\") pod \"417dab25-339f-4d8f-a938-437a607fe627\" (UID: \"417dab25-339f-4d8f-a938-437a607fe627\") "
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.176548 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities" (OuterVolumeSpecName: "utilities") pod "417dab25-339f-4d8f-a938-437a607fe627" (UID: "417dab25-339f-4d8f-a938-437a607fe627"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.183665 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr" (OuterVolumeSpecName: "kube-api-access-vqvkr") pod "417dab25-339f-4d8f-a938-437a607fe627" (UID: "417dab25-339f-4d8f-a938-437a607fe627"). InnerVolumeSpecName "kube-api-access-vqvkr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.264735 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "417dab25-339f-4d8f-a938-437a607fe627" (UID: "417dab25-339f-4d8f-a938-437a607fe627"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.278126 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.278173 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/417dab25-339f-4d8f-a938-437a607fe627-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.278185 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqvkr\" (UniqueName: \"kubernetes.io/projected/417dab25-339f-4d8f-a938-437a607fe627-kube-api-access-vqvkr\") on node \"crc\" DevicePath \"\""
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.603517 4661 generic.go:334] "Generic (PLEG): container finished" podID="417dab25-339f-4d8f-a938-437a607fe627" containerID="94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36" exitCode=0
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.603587 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rt62v"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.603701 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerDied","Data":"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"}
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.605541 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rt62v" event={"ID":"417dab25-339f-4d8f-a938-437a607fe627","Type":"ContainerDied","Data":"be15eb21dd5fcd53c647edd251a6a652ddf7d3e6dc7f1d22681b954e3fa20b8d"}
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.605586 4661 scope.go:117] "RemoveContainer" containerID="94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.635917 4661 scope.go:117] "RemoveContainer" containerID="11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"
Oct 01 07:16:25 crc kubenswrapper[4661]: E1001 07:16:25.639827 4661 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/redhat-operators-rt62v_openshift-marketplace_extract-content-11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58.log: no such file or directory" path="/var/log/containers/redhat-operators-rt62v_openshift-marketplace_extract-content-11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58.log"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.654107 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.661482 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rt62v"]
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.673343 4661 scope.go:117] "RemoveContainer" containerID="8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.728789 4661 scope.go:117] "RemoveContainer" containerID="94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"
Oct 01 07:16:25 crc kubenswrapper[4661]: E1001 07:16:25.729362 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36\": container with ID starting with 94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36 not found: ID does not exist" containerID="94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.729391 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36"} err="failed to get container status \"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36\": rpc error: code = NotFound desc = could not find container \"94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36\": container with ID starting with 94f45dfb3da6c129914fe8a7ca830801eeb485ecf3177727b439f65b6c9e4b36 not found: ID does not exist"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.729411 4661 scope.go:117] "RemoveContainer" containerID="11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"
Oct 01 07:16:25 crc kubenswrapper[4661]: E1001 07:16:25.729619 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58\": container with ID starting with 11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58 not found: ID does not exist" containerID="11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.729660 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58"} err="failed to get container status \"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58\": rpc error: code = NotFound desc = could not find container \"11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58\": container with ID starting with 11cdfd91bc61c19874878562144873843f2b5d8362e40259811d048800b16e58 not found: ID does not exist"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.729674 4661 scope.go:117] "RemoveContainer" containerID="8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944"
Oct 01 07:16:25 crc kubenswrapper[4661]: E1001 07:16:25.729880 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944\": container with ID starting with 8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944 not found: ID does not exist" containerID="8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.729918 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944"} err="failed to get container status \"8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944\": rpc error: code = NotFound desc = could not find container \"8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944\": container with ID starting with 8ef63abc5bd8591756e6835ed04b2a5e181d2eb657e65351c3831bd7d87e2944 not found: ID does not exist"
Oct 01 07:16:25 crc kubenswrapper[4661]: I1001 07:16:25.767486 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="417dab25-339f-4d8f-a938-437a607fe627" path="/var/lib/kubelet/pods/417dab25-339f-4d8f-a938-437a607fe627/volumes"
Oct 01 07:16:32 crc kubenswrapper[4661]: I1001 07:16:32.757187 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:16:32 crc kubenswrapper[4661]: E1001 07:16:32.757947 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:16:33 crc kubenswrapper[4661]: I1001 07:16:33.061502 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-8qwx2_e4e8d27f-fbab-4c45-b182-73df1cef9061/prometheus-operator/0.log"
Oct 01 07:16:33 crc kubenswrapper[4661]: I1001 07:16:33.191080 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d9594f8bb-gnmb5_6f995c9b-5efa-4d54-be17-7d67186446c3/prometheus-operator-admission-webhook/0.log"
Oct 01 07:16:33 crc kubenswrapper[4661]: I1001 07:16:33.251458 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d9594f8bb-wfszf_cb7f3b2f-e219-4709-85e3-8b1df4b288bd/prometheus-operator-admission-webhook/0.log"
Oct 01 07:16:33 crc kubenswrapper[4661]: I1001 07:16:33.382735 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-scssh_a7062884-4a27-4396-b3f5-698aceda68d2/operator/0.log"
Oct 01 07:16:33 crc kubenswrapper[4661]: I1001 07:16:33.455813 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-pnpg7_58ef1745-3b19-4508-b099-100418c1a6d7/perses-operator/0.log"
Oct 01 07:16:43 crc kubenswrapper[4661]: I1001 07:16:43.758594 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:16:43 crc kubenswrapper[4661]: E1001 07:16:43.759390 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:16:57 crc kubenswrapper[4661]: I1001 07:16:57.761184 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:16:57 crc kubenswrapper[4661]: E1001 07:16:57.762345 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:17:12 crc kubenswrapper[4661]: I1001 07:17:12.757735 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:17:12 crc kubenswrapper[4661]: E1001 07:17:12.758802 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:17:26 crc kubenswrapper[4661]: I1001 07:17:26.758108 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:17:26 crc kubenswrapper[4661]: E1001 07:17:26.759234 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:17:41 crc kubenswrapper[4661]: I1001 07:17:41.774236 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4"
Oct 01 07:17:41 crc kubenswrapper[4661]: E1001 07:17:41.775364 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.008964 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fvt4q"]
Oct 01 07:17:42 crc kubenswrapper[4661]: E1001 07:17:42.009537 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="registry-server"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.009561 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="registry-server"
Oct 01 07:17:42 crc kubenswrapper[4661]: E1001 07:17:42.009586 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="extract-utilities"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.009595 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="extract-utilities"
Oct 01 07:17:42 crc kubenswrapper[4661]: E1001 07:17:42.009648 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="extract-content"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.009662 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="extract-content"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.009974 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="417dab25-339f-4d8f-a938-437a607fe627" containerName="registry-server"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.012098 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.021498 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvt4q"]
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.171811 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzh5r\" (UniqueName: \"kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.172115 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.172446 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.273684 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzh5r\" (UniqueName: \"kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.273790 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.273875 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.274340 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.274858 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q"
Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.304410 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzh5r\" (UniqueName: \"kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r\") pod \"community-operators-fvt4q\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.346484 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:42 crc kubenswrapper[4661]: I1001 07:17:42.870739 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvt4q"] Oct 01 07:17:43 crc kubenswrapper[4661]: I1001 07:17:43.533902 4661 generic.go:334] "Generic (PLEG): container finished" podID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerID="cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82" exitCode=0 Oct 01 07:17:43 crc kubenswrapper[4661]: I1001 07:17:43.534007 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerDied","Data":"cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82"} Oct 01 07:17:43 crc kubenswrapper[4661]: I1001 07:17:43.534278 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerStarted","Data":"59adf9370530ae97132273906ff34a493ca0623d9b85b16da05c08ff1d9c3115"} Oct 01 07:17:45 crc kubenswrapper[4661]: I1001 07:17:45.563848 4661 generic.go:334] "Generic (PLEG): container finished" podID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerID="85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5" exitCode=0 Oct 01 07:17:45 crc kubenswrapper[4661]: I1001 07:17:45.564087 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerDied","Data":"85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5"} Oct 01 07:17:46 crc kubenswrapper[4661]: I1001 07:17:46.576966 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerStarted","Data":"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb"} Oct 01 07:17:46 crc kubenswrapper[4661]: I1001 07:17:46.596617 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fvt4q" podStartSLOduration=3.162537511 podStartE2EDuration="5.596604077s" podCreationTimestamp="2025-10-01 07:17:41 +0000 UTC" firstStartedPulling="2025-10-01 07:17:43.537295157 +0000 UTC m=+6512.475273801" lastFinishedPulling="2025-10-01 07:17:45.971361743 +0000 UTC m=+6514.909340367" observedRunningTime="2025-10-01 07:17:46.589911583 +0000 UTC m=+6515.527890197" watchObservedRunningTime="2025-10-01 07:17:46.596604077 +0000 UTC m=+6515.534582691" Oct 01 07:17:52 crc kubenswrapper[4661]: I1001 07:17:52.347118 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:52 crc kubenswrapper[4661]: I1001 07:17:52.347825 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:52 crc kubenswrapper[4661]: I1001 07:17:52.439985 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:52 crc kubenswrapper[4661]: I1001 07:17:52.706113 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:52 crc kubenswrapper[4661]: I1001 07:17:52.755097 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvt4q"] Oct 01 07:17:54 crc kubenswrapper[4661]: I1001 07:17:54.670944 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fvt4q" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="registry-server" containerID="cri-o://815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb" gracePeriod=2 Oct 01 07:17:54 crc kubenswrapper[4661]: I1001 07:17:54.756960 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:17:54 crc kubenswrapper[4661]: E1001 07:17:54.757681 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.160102 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.272198 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzh5r\" (UniqueName: \"kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r\") pod \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.272287 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content\") pod \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.272427 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities\") pod \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\" (UID: \"d62e98e2-7bf5-43ca-9f5f-d0ade718d946\") " Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.273432 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities" (OuterVolumeSpecName: "utilities") pod "d62e98e2-7bf5-43ca-9f5f-d0ade718d946" (UID: "d62e98e2-7bf5-43ca-9f5f-d0ade718d946"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.280860 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r" (OuterVolumeSpecName: "kube-api-access-pzh5r") pod "d62e98e2-7bf5-43ca-9f5f-d0ade718d946" (UID: "d62e98e2-7bf5-43ca-9f5f-d0ade718d946"). InnerVolumeSpecName "kube-api-access-pzh5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.374924 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.374965 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzh5r\" (UniqueName: \"kubernetes.io/projected/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-kube-api-access-pzh5r\") on node \"crc\" DevicePath \"\"" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.505248 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d62e98e2-7bf5-43ca-9f5f-d0ade718d946" (UID: "d62e98e2-7bf5-43ca-9f5f-d0ade718d946"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.578120 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d62e98e2-7bf5-43ca-9f5f-d0ade718d946-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.688796 4661 generic.go:334] "Generic (PLEG): container finished" podID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerID="815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb" exitCode=0 Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.688859 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerDied","Data":"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb"} Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.688931 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvt4q" event={"ID":"d62e98e2-7bf5-43ca-9f5f-d0ade718d946","Type":"ContainerDied","Data":"59adf9370530ae97132273906ff34a493ca0623d9b85b16da05c08ff1d9c3115"} Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.688994 4661 scope.go:117] "RemoveContainer" containerID="815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.690001 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fvt4q" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.747171 4661 scope.go:117] "RemoveContainer" containerID="85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.753415 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvt4q"] Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.787714 4661 scope.go:117] "RemoveContainer" containerID="cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.790715 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fvt4q"] Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.837733 4661 scope.go:117] "RemoveContainer" containerID="815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb" Oct 01 07:17:55 crc kubenswrapper[4661]: E1001 07:17:55.838126 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb\": container with ID starting with 815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb not found: ID does not exist" containerID="815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.838163 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb"} err="failed to get container status \"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb\": rpc error: code = NotFound desc = could not find container \"815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb\": container with ID starting with 815c4db69ff95a29c75b488c69f48150e75bbfb35b371b583a290cda5c6451bb not found: ID does not exist" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.838217 4661 scope.go:117] "RemoveContainer" containerID="85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5" Oct 01 07:17:55 crc kubenswrapper[4661]: E1001 07:17:55.838696 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5\": container with ID starting with 85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5 not found: ID does not exist" containerID="85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.838730 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5"} err="failed to get container status \"85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5\": rpc error: code = NotFound desc = could not find container \"85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5\": container with ID starting with 85f011c601e5999de00e36f9c1e0f5fd4d8560413bdb805c1e918293e0c8f0f5 not found: ID does not exist" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.838749 4661 scope.go:117] "RemoveContainer" containerID="cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82" Oct 01 07:17:55 crc kubenswrapper[4661]: E1001 07:17:55.838998 4661 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82\": container with ID starting with cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82 not found: ID does not exist" containerID="cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82" Oct 01 07:17:55 crc kubenswrapper[4661]: I1001 07:17:55.839107 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82"} err="failed to get container status \"cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82\": rpc error: code = NotFound desc = could not find container \"cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82\": container with ID starting with cc9556ec0521d4f8636058fa56a74736fe9a964ef3b0c437d796f51937e88a82 not found: ID does not exist" Oct 01 07:17:57 crc kubenswrapper[4661]: I1001 07:17:57.777828 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" path="/var/lib/kubelet/pods/d62e98e2-7bf5-43ca-9f5f-d0ade718d946/volumes" Oct 01 07:18:06 crc kubenswrapper[4661]: I1001 07:18:06.757279 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:18:06 crc kubenswrapper[4661]: E1001 07:18:06.757992 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:18:17 crc kubenswrapper[4661]: I1001 07:18:17.757747 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:18:17 crc kubenswrapper[4661]: E1001 07:18:17.758605 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:18:30 crc kubenswrapper[4661]: I1001 07:18:30.757697 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:18:30 crc kubenswrapper[4661]: E1001 07:18:30.758582 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:18:44 crc kubenswrapper[4661]: I1001 07:18:44.295285 4661 generic.go:334] "Generic (PLEG): container finished" podID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerID="b0722a1eb4e0625f4f3f8cf1145c507df188d76a9e25be46adb09a6db906f73b" exitCode=0 Oct 01 07:18:44 crc kubenswrapper[4661]: I1001 07:18:44.295363 4661 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" event={"ID":"61afe435-c553-43ce-876a-b44bb52bc6d2","Type":"ContainerDied","Data":"b0722a1eb4e0625f4f3f8cf1145c507df188d76a9e25be46adb09a6db906f73b"} Oct 01 07:18:44 crc kubenswrapper[4661]: I1001 07:18:44.296560 4661 scope.go:117] "RemoveContainer" containerID="b0722a1eb4e0625f4f3f8cf1145c507df188d76a9e25be46adb09a6db906f73b" Oct 01 07:18:44 crc kubenswrapper[4661]: I1001 07:18:44.757059 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:18:44 crc kubenswrapper[4661]: E1001 07:18:44.757652 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:18:45 crc kubenswrapper[4661]: I1001 07:18:45.225363 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pqw4p_must-gather-pkdb2_61afe435-c553-43ce-876a-b44bb52bc6d2/gather/0.log" Oct 01 07:18:56 crc kubenswrapper[4661]: I1001 07:18:56.758980 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:18:56 crc kubenswrapper[4661]: E1001 07:18:56.759672 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.056759 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pqw4p/must-gather-pkdb2"] Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.057534 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="copy" containerID="cri-o://892fd50f7a35b6fa587eb5624911465618bcaee23d4cf73ffb2fdd129e1652a9" gracePeriod=2 Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.079490 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pqw4p/must-gather-pkdb2"] Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.461029 4661 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pqw4p_must-gather-pkdb2_61afe435-c553-43ce-876a-b44bb52bc6d2/copy/0.log" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.461908 4661 generic.go:334] "Generic (PLEG): container finished" podID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerID="892fd50f7a35b6fa587eb5624911465618bcaee23d4cf73ffb2fdd129e1652a9" exitCode=143 Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.461964 4661 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de85e25e47b405acbc35f4b8df30ac6f5c211617795db80d852d7267bdc3a5e4" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.494172 4661 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-pqw4p_must-gather-pkdb2_61afe435-c553-43ce-876a-b44bb52bc6d2/copy/0.log" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.494548 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.606747 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output\") pod \"61afe435-c553-43ce-876a-b44bb52bc6d2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.606896 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhv6q\" (UniqueName: \"kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q\") pod \"61afe435-c553-43ce-876a-b44bb52bc6d2\" (UID: \"61afe435-c553-43ce-876a-b44bb52bc6d2\") " Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.612592 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q" (OuterVolumeSpecName: "kube-api-access-jhv6q") pod "61afe435-c553-43ce-876a-b44bb52bc6d2" (UID: "61afe435-c553-43ce-876a-b44bb52bc6d2"). InnerVolumeSpecName "kube-api-access-jhv6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.708876 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhv6q\" (UniqueName: \"kubernetes.io/projected/61afe435-c553-43ce-876a-b44bb52bc6d2-kube-api-access-jhv6q\") on node \"crc\" DevicePath \"\"" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.789772 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "61afe435-c553-43ce-876a-b44bb52bc6d2" (UID: "61afe435-c553-43ce-876a-b44bb52bc6d2"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:18:58 crc kubenswrapper[4661]: I1001 07:18:58.810552 4661 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/61afe435-c553-43ce-876a-b44bb52bc6d2-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 01 07:18:59 crc kubenswrapper[4661]: I1001 07:18:59.471822 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pqw4p/must-gather-pkdb2" Oct 01 07:18:59 crc kubenswrapper[4661]: I1001 07:18:59.769219 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" path="/var/lib/kubelet/pods/61afe435-c553-43ce-876a-b44bb52bc6d2/volumes" Oct 01 07:19:11 crc kubenswrapper[4661]: I1001 07:19:11.779553 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:19:11 crc kubenswrapper[4661]: E1001 07:19:11.780925 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:19:25 crc kubenswrapper[4661]: I1001 07:19:25.736900 4661 scope.go:117] "RemoveContainer" containerID="892fd50f7a35b6fa587eb5624911465618bcaee23d4cf73ffb2fdd129e1652a9" Oct 01 07:19:25 crc kubenswrapper[4661]: I1001 07:19:25.761957 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:19:25 crc kubenswrapper[4661]: E1001 07:19:25.762297 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:19:25 crc kubenswrapper[4661]: I1001 07:19:25.766243 4661 scope.go:117] "RemoveContainer" containerID="5ca8a2542a50e58d2137e78d9ec8d9b33cc02e6b03be3ab881fb1933faad21b6" Oct 01 07:19:25 crc kubenswrapper[4661]: I1001 07:19:25.802479 4661 scope.go:117] "RemoveContainer" containerID="b0722a1eb4e0625f4f3f8cf1145c507df188d76a9e25be46adb09a6db906f73b" Oct 01 07:19:39 crc kubenswrapper[4661]: I1001 07:19:39.757889 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:19:39 crc kubenswrapper[4661]: E1001 07:19:39.758773 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" Oct 01 07:19:54 crc kubenswrapper[4661]: I1001 07:19:54.756603 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:19:54 crc kubenswrapper[4661]: E1001 07:19:54.757287 4661 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wp2wh_openshift-machine-config-operator(7584c4bc-4202-487e-a2b4-4319f428a792)\"" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" podUID="7584c4bc-4202-487e-a2b4-4319f428a792" 
Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.734590 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:19:56 crc kubenswrapper[4661]: E1001 07:19:56.735531 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="extract-content" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.735555 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="extract-content" Oct 01 07:19:56 crc kubenswrapper[4661]: E1001 07:19:56.735614 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="gather" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.735627 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="gather" Oct 01 07:19:56 crc kubenswrapper[4661]: E1001 07:19:56.735710 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="copy" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.735724 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="copy" Oct 01 07:19:56 crc kubenswrapper[4661]: E1001 07:19:56.735751 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="registry-server" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.735765 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="registry-server" Oct 01 07:19:56 crc kubenswrapper[4661]: E1001 07:19:56.735796 4661 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="extract-utilities" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.735809 4661 state_mem.go:107] "Deleted CPUSet assignment" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="extract-utilities" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.736193 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="copy" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.736226 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="d62e98e2-7bf5-43ca-9f5f-d0ade718d946" containerName="registry-server" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.736247 4661 memory_manager.go:354] "RemoveStaleState removing state" podUID="61afe435-c553-43ce-876a-b44bb52bc6d2" containerName="gather" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.739003 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.760867 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.807242 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.807341 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.807405 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nwck\" (UniqueName: \"kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.908838 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.908950 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nwck\" (UniqueName: \"kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.909065 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.909505 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.910054 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:56 crc kubenswrapper[4661]: I1001 07:19:56.931467 4661 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8nwck\" (UniqueName: \"kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck\") pod \"certified-operators-qrfxn\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:57 crc kubenswrapper[4661]: I1001 07:19:57.109333 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:19:57 crc kubenswrapper[4661]: I1001 07:19:57.703405 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:19:58 crc kubenswrapper[4661]: I1001 07:19:58.200391 4661 generic.go:334] "Generic (PLEG): container finished" podID="19a5b1df-d0d6-4e09-996f-c1e09431d83f" containerID="4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc" exitCode=0 Oct 01 07:19:58 crc kubenswrapper[4661]: I1001 07:19:58.200435 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerDied","Data":"4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc"} Oct 01 07:19:58 crc kubenswrapper[4661]: I1001 07:19:58.200462 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerStarted","Data":"bc37ce0099bc6e12386bb6f544d0d9c0959ac7fcf7b4d3e230e9ed7d6f1f0df8"} Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.239388 4661 generic.go:334] "Generic (PLEG): container finished" podID="19a5b1df-d0d6-4e09-996f-c1e09431d83f" containerID="57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f" exitCode=0 Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.240255 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerDied","Data":"57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f"} Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.718698 4661 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"] Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.720798 4661 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.746732 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"] Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.787723 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.787833 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.787989 4661 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtvnn\" (UniqueName: \"kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.890042 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtvnn\" (UniqueName: \"kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.890304 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.890383 4661 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.890876 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.891798 4661 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:00 crc kubenswrapper[4661]: I1001 07:20:00.910233 4661 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mtvnn\" (UniqueName: \"kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn\") pod \"redhat-marketplace-5cd8l\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") " pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:01 crc kubenswrapper[4661]: I1001 07:20:01.044353 4661 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cd8l" Oct 01 07:20:01 crc kubenswrapper[4661]: I1001 07:20:01.260560 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerStarted","Data":"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2"} Oct 01 07:20:01 crc kubenswrapper[4661]: I1001 07:20:01.287609 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qrfxn" podStartSLOduration=2.755767008 podStartE2EDuration="5.287593532s" podCreationTimestamp="2025-10-01 07:19:56 +0000 UTC" firstStartedPulling="2025-10-01 07:19:58.202662746 +0000 UTC m=+6647.140641370" lastFinishedPulling="2025-10-01 07:20:00.73448927 +0000 UTC m=+6649.672467894" observedRunningTime="2025-10-01 07:20:01.286512032 +0000 UTC m=+6650.224490646" watchObservedRunningTime="2025-10-01 07:20:01.287593532 +0000 UTC m=+6650.225572146" Oct 01 07:20:01 crc kubenswrapper[4661]: W1001 07:20:01.515771 4661 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda80714b2_bdf4_470d_b4f4_2e23aef5c6ed.slice/crio-14c0651cc156d41633b56d77ca377992079c465bb590e69eb15be05d6ea33ee5 WatchSource:0}: Error finding container 14c0651cc156d41633b56d77ca377992079c465bb590e69eb15be05d6ea33ee5: Status 404 returned error can't find the container with id 14c0651cc156d41633b56d77ca377992079c465bb590e69eb15be05d6ea33ee5 Oct 01 07:20:01 crc kubenswrapper[4661]: I1001 07:20:01.526934 4661 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"] Oct 01 07:20:02 crc kubenswrapper[4661]: I1001 07:20:02.271228 4661 generic.go:334] "Generic (PLEG): container finished" podID="a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" containerID="1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0" exitCode=0 Oct 01 07:20:02 crc kubenswrapper[4661]: I1001 07:20:02.271297 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerDied","Data":"1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0"} Oct 01 07:20:02 crc kubenswrapper[4661]: I1001 07:20:02.271575 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerStarted","Data":"14c0651cc156d41633b56d77ca377992079c465bb590e69eb15be05d6ea33ee5"} Oct 01 07:20:04 crc kubenswrapper[4661]: I1001 07:20:04.309287 4661 generic.go:334] "Generic (PLEG): container finished" podID="a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" containerID="62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370" exitCode=0 Oct 01 07:20:04 crc kubenswrapper[4661]: I1001 07:20:04.309409 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" 
event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerDied","Data":"62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370"} Oct 01 07:20:05 crc kubenswrapper[4661]: I1001 07:20:05.328344 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerStarted","Data":"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"} Oct 01 07:20:05 crc kubenswrapper[4661]: I1001 07:20:05.349255 4661 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5cd8l" podStartSLOduration=2.831298189 podStartE2EDuration="5.349237664s" podCreationTimestamp="2025-10-01 07:20:00 +0000 UTC" firstStartedPulling="2025-10-01 07:20:02.273319024 +0000 UTC m=+6651.211297638" lastFinishedPulling="2025-10-01 07:20:04.791258489 +0000 UTC m=+6653.729237113" observedRunningTime="2025-10-01 07:20:05.348188414 +0000 UTC m=+6654.286167048" watchObservedRunningTime="2025-10-01 07:20:05.349237664 +0000 UTC m=+6654.287216288" Oct 01 07:20:07 crc kubenswrapper[4661]: I1001 07:20:07.109578 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:07 crc kubenswrapper[4661]: I1001 07:20:07.110959 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:07 crc kubenswrapper[4661]: I1001 07:20:07.199334 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:07 crc kubenswrapper[4661]: I1001 07:20:07.424417 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:08 crc kubenswrapper[4661]: I1001 07:20:08.757380 4661 scope.go:117] "RemoveContainer" containerID="57fda00adb0aef04f328db61a6406b7c9b838f98a0b92d36bcb73ea8081e99e4" Oct 01 07:20:09 crc kubenswrapper[4661]: I1001 07:20:09.384618 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wp2wh" event={"ID":"7584c4bc-4202-487e-a2b4-4319f428a792","Type":"ContainerStarted","Data":"717a9aa2bb11eea66f68b2fd08aa44913f1445f647b299784e479dff222f035d"} Oct 01 07:20:09 crc kubenswrapper[4661]: I1001 07:20:09.709395 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:20:09 crc kubenswrapper[4661]: I1001 07:20:09.709593 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qrfxn" podUID="19a5b1df-d0d6-4e09-996f-c1e09431d83f" containerName="registry-server" containerID="cri-o://917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2" gracePeriod=2 Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.253455 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.302248 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nwck\" (UniqueName: \"kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck\") pod \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.302369 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities\") pod \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.302416 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content\") pod \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\" (UID: \"19a5b1df-d0d6-4e09-996f-c1e09431d83f\") " Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.303358 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities" (OuterVolumeSpecName: "utilities") pod "19a5b1df-d0d6-4e09-996f-c1e09431d83f" (UID: "19a5b1df-d0d6-4e09-996f-c1e09431d83f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.310035 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck" (OuterVolumeSpecName: "kube-api-access-8nwck") pod "19a5b1df-d0d6-4e09-996f-c1e09431d83f" (UID: "19a5b1df-d0d6-4e09-996f-c1e09431d83f"). InnerVolumeSpecName "kube-api-access-8nwck". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.352926 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19a5b1df-d0d6-4e09-996f-c1e09431d83f" (UID: "19a5b1df-d0d6-4e09-996f-c1e09431d83f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.394544 4661 generic.go:334] "Generic (PLEG): container finished" podID="19a5b1df-d0d6-4e09-996f-c1e09431d83f" containerID="917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2" exitCode=0 Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.394596 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerDied","Data":"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2"} Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.394608 4661 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qrfxn" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.394627 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrfxn" event={"ID":"19a5b1df-d0d6-4e09-996f-c1e09431d83f","Type":"ContainerDied","Data":"bc37ce0099bc6e12386bb6f544d0d9c0959ac7fcf7b4d3e230e9ed7d6f1f0df8"} Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.394683 4661 scope.go:117] "RemoveContainer" containerID="917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.405055 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nwck\" (UniqueName: \"kubernetes.io/projected/19a5b1df-d0d6-4e09-996f-c1e09431d83f-kube-api-access-8nwck\") on node \"crc\" DevicePath \"\"" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.405103 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.405116 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19a5b1df-d0d6-4e09-996f-c1e09431d83f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.416261 4661 scope.go:117] "RemoveContainer" containerID="57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.437584 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.453615 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qrfxn"] Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.454625 4661 scope.go:117] "RemoveContainer" containerID="4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.483186 4661 scope.go:117] "RemoveContainer" containerID="917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2" Oct 01 07:20:10 crc kubenswrapper[4661]: E1001 07:20:10.483714 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2\": container with ID starting with 917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2 not found: ID does not exist" containerID="917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.483749 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2"} err="failed to get container status \"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2\": rpc error: code = NotFound desc = could not find container \"917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2\": container with ID starting with 917e469828bd8dc193e03eaeda35bc4861683d4fa444661a8dcfe2efe9a76cc2 not found: ID does not exist" Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.483770 4661 scope.go:117] "RemoveContainer" containerID="57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f" Oct 01 07:20:10 crc kubenswrapper[4661]: 
E1001 07:20:10.484082 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f\": container with ID starting with 57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f not found: ID does not exist" containerID="57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f"
Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.484103 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f"} err="failed to get container status \"57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f\": rpc error: code = NotFound desc = could not find container \"57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f\": container with ID starting with 57505c4958a5af4b77b5c8e1a176c7665ce27cdec9671e82d8a7df954756282f not found: ID does not exist"
Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.484119 4661 scope.go:117] "RemoveContainer" containerID="4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc"
Oct 01 07:20:10 crc kubenswrapper[4661]: E1001 07:20:10.484320 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc\": container with ID starting with 4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc not found: ID does not exist" containerID="4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc"
Oct 01 07:20:10 crc kubenswrapper[4661]: I1001 07:20:10.484343 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc"} err="failed to get container status \"4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc\": rpc error: code = NotFound desc = could not find container \"4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc\": container with ID starting with 4983853f6d1efea7c983752c4a740d0f43da6848180589bddffc98c22761a6bc not found: ID does not exist"
Oct 01 07:20:11 crc kubenswrapper[4661]: I1001 07:20:11.045296 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:11 crc kubenswrapper[4661]: I1001 07:20:11.045382 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:11 crc kubenswrapper[4661]: I1001 07:20:11.130930 4661 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:11 crc kubenswrapper[4661]: I1001 07:20:11.470044 4661 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:11 crc kubenswrapper[4661]: I1001 07:20:11.814591 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a5b1df-d0d6-4e09-996f-c1e09431d83f" path="/var/lib/kubelet/pods/19a5b1df-d0d6-4e09-996f-c1e09431d83f/volumes"
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.315225 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"]
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.316096 4661 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5cd8l" podUID="a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" containerName="registry-server" containerID="cri-o://36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2" gracePeriod=2
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.849678 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.943561 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtvnn\" (UniqueName: \"kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn\") pod \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") "
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.943651 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content\") pod \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") "
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.943815 4661 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities\") pod \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\" (UID: \"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed\") "
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.944584 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities" (OuterVolumeSpecName: "utilities") pod "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" (UID: "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.952237 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn" (OuterVolumeSpecName: "kube-api-access-mtvnn") pod "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" (UID: "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed"). InnerVolumeSpecName "kube-api-access-mtvnn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 07:20:14 crc kubenswrapper[4661]: I1001 07:20:14.955783 4661 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" (UID: "a80714b2-bdf4-470d-b4f4-2e23aef5c6ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.046023 4661 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.046071 4661 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.046085 4661 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtvnn\" (UniqueName: \"kubernetes.io/projected/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed-kube-api-access-mtvnn\") on node \"crc\" DevicePath \"\""
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.452563 4661 generic.go:334] "Generic (PLEG): container finished" podID="a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" containerID="36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2" exitCode=0
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.452626 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerDied","Data":"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"}
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.452693 4661 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cd8l" event={"ID":"a80714b2-bdf4-470d-b4f4-2e23aef5c6ed","Type":"ContainerDied","Data":"14c0651cc156d41633b56d77ca377992079c465bb590e69eb15be05d6ea33ee5"}
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.452722 4661 scope.go:117] "RemoveContainer" containerID="36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.452902 4661 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cd8l"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.485150 4661 scope.go:117] "RemoveContainer" containerID="62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.502153 4661 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"]
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.510422 4661 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cd8l"]
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.535853 4661 scope.go:117] "RemoveContainer" containerID="1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.589818 4661 scope.go:117] "RemoveContainer" containerID="36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"
Oct 01 07:20:15 crc kubenswrapper[4661]: E1001 07:20:15.590619 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2\": container with ID starting with 36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2 not found: ID does not exist" containerID="36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.590692 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2"} err="failed to get container status \"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2\": rpc error: code = NotFound desc = could not find container \"36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2\": container with ID starting with 36c18ef6f135591d1eacdfdb8c1e20ebcde76c257297bd7a9a1a11970745e2f2 not found: ID does not exist"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.590724 4661 scope.go:117] "RemoveContainer" containerID="62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370"
Oct 01 07:20:15 crc kubenswrapper[4661]: E1001 07:20:15.591262 4661 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370\": container with ID starting with 62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370 not found: ID does not exist" containerID="62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.591301 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370"} err="failed to get container status \"62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370\": rpc error: code = NotFound desc = could not find container \"62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370\": container with ID starting with 62fa5620e195b38806d254a433adcb331b4bec084c6315b60df3568d825ba370 not found: ID does not exist"
Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.591327 4661 scope.go:117] "RemoveContainer" containerID="1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0"
failed" err="rpc error: code = NotFound desc = could not find container \"1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0\": container with ID starting with 1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0 not found: ID does not exist" containerID="1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0" Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.592107 4661 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0"} err="failed to get container status \"1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0\": rpc error: code = NotFound desc = could not find container \"1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0\": container with ID starting with 1967fbd54ad64f7a943685bbce58d00da5f0e4da7fa1e3b89aea5ca8aa5bd8b0 not found: ID does not exist" Oct 01 07:20:15 crc kubenswrapper[4661]: I1001 07:20:15.771189 4661 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a80714b2-bdf4-470d-b4f4-2e23aef5c6ed" path="/var/lib/kubelet/pods/a80714b2-bdf4-470d-b4f4-2e23aef5c6ed/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067153140024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067153141017365 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067135471016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067135471015466 5ustar corecore